Add error if call stack exceeds maximum capacity
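In outline, the check added to hf__interpreter__run below compares the call stack's current capacity against the new call_stack_cap_max field (seeded with HF__INTERPRETER__CALL_STACK_CAP_MAX = 1024) after each successful word function, and once it is reached it builds a heap-allocated "%lu >= %lu" message and returns an HF__ERROR__INTERPRETER__CALL_STACK_TOO_BIG error that the new HF__ERROR_PANIC table marks as fatal, so hf__handle_error_light exits after printing it. A minimal standalone sketch of that pattern follows, with simplified types and hypothetical values rather than the interpreter's real structs (%zu is used here for size_t; the commit itself formats with %lu):

#include <stdio.h>
#include <stdlib.h>

int main(void) {
    /* hypothetical values; the interpreter reads these from its own fields */
    size_t cap = 1529;      /* stand-in for interpreter->call_stack_cap */
    size_t cap_max = 1024;  /* stand-in for interpreter->call_stack_cap_max */

    if (cap_max != 0 && cap >= cap_max) {
        /* size the message first, then allocate and format it,
           following the pattern used in hf__interpreter__run */
        const int required = snprintf(NULL, 0, "%zu >= %zu", cap, cap_max);
        char *msg = malloc((size_t)required + 1);
        if (msg != NULL) {
            snprintf(msg, (size_t)required + 1, "%zu >= %zu", cap, cap_max);
            /* stand-in for returning HF__ERR_CUSTOM(..., msg, true) */
            fprintf(stderr, "Call stack is too big: %s\n", msg);
            free(msg);
        }
        return 1;
    }
    return 0;
}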

Dominic Grimm 2023-08-05 20:19:52 +02:00
parent 411665061d
commit 7490439223
9 changed files with 138 additions and 102 deletions

View File

@@ -17,10 +17,13 @@ struct hf__interpreter__word {
extern const hf__hashmap__free_value_t hf__interpreter__word_free;
#define HF__INTERPRETER__CALL_STACK_CAP_MAX 1024
struct hf__interpreter {
struct hf__node *call_stack;
size_t call_stack_len;
size_t call_stack_size;
size_t call_stack_cap;
size_t call_stack_cap_max;
struct hf__hashmap words; // struct hf__interpreter__word *
@@ -41,7 +44,7 @@ extern const hf__interpreter__word_func_t
HF__INTERPRETER__WORD_FUNCTION[__HF__NODE_TYPE__N];
extern void hf__interpreter__stack_push(long **arr, size_t *const len,
size_t *const size, long item);
size_t *const cap, long item);
extern struct hf__result
hf__interpreter__run(struct hf__interpreter *const interpreter);

View File

@@ -24,6 +24,6 @@ struct hf__token {
extern void hf__lex(const char *const src, const size_t src_len,
struct hf__token **tokens, size_t *const len,
size_t *const size);
size_t *const cap);
#endif

View File

@@ -74,12 +74,12 @@ extern void hf__parser__init_keyword_map(struct hf__hashmap *const map,
size_t cap);
extern void hf__parser__node_array_push(struct hf__node **arr,
size_t *const len, size_t *const size,
size_t *const len, size_t *const cap,
struct hf__node item);
extern struct hf__result
hf__parse(struct hf__parser *const parser, const char *const src,
const struct hf__token *const tokens, const size_t tokens_len,
struct hf__node **nodes, size_t *const len, size_t *const size);
struct hf__node **nodes, size_t *const len, size_t *const cap);
#endif

View File

@@ -9,6 +9,7 @@ enum hf__error {
HF__ERROR__PARSER__WORD_DEF_INVALID_NAME,
HF__ERROR__PARSER__WORD_DEF_IS_KEYWORD,
HF__ERROR__INTERPRETER__CALL_STACK_TOO_BIG,
HF__ERROR__INTERPRETER__UNKNOWN_WORD,
HF__ERROR__INTERPRETER__WORD_ALREADY_DEF,
HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
@@ -31,12 +32,27 @@ static const char *const HF__ERROR_STR[__HF__ERROR__N] = {
"Invalid token type for word name",
[HF__ERROR__PARSER__WORD_DEF_IS_KEYWORD] = "Word name is already a keyword",
[HF__ERROR__INTERPRETER__CALL_STACK_TOO_BIG] = "Call stack is too big",
[HF__ERROR__INTERPRETER__UNKNOWN_WORD] = "Unknown word",
[HF__ERROR__INTERPRETER__WORD_ALREADY_DEF] = "Word is already defined",
[HF__ERROR__INTERPRETER__STACK_UNDERFLOW] = "Stack underflow",
};
extern void hf__print_error(struct hf__error_wrapper *error);
static const bool HF__ERROR_PANIC[__HF__ERROR__N] = {
[HF__ERROR__PARSER__UNEXPECTED] = false,
[HF__ERROR__PARSER__INVALID_NUMBER] = false,
[HF__ERROR__PARSER__INVALID_CHAR] = false,
[HF__ERROR__PARSER__WORD_DEF_INCOMPLETE] = false,
[HF__ERROR__PARSER__WORD_DEF_INVALID_NAME] = false,
[HF__ERROR__PARSER__WORD_DEF_IS_KEYWORD] = false,
[HF__ERROR__INTERPRETER__CALL_STACK_TOO_BIG] = true,
[HF__ERROR__INTERPRETER__UNKNOWN_WORD] = false,
[HF__ERROR__INTERPRETER__WORD_ALREADY_DEF] = false,
[HF__ERROR__INTERPRETER__STACK_UNDERFLOW] = false,
};
extern void hf__handle_error_light(struct hf__error_wrapper *error);
struct hf__result {
bool ok;

View File

@@ -56,9 +56,9 @@ struct hf__result words__word(struct hf__interpreter *const interpreter,
}
for (size_t i = (*word)->body_len - 1; i != 0 - 1; i--) {
hf__parser__node_array_push(
&interpreter->call_stack, &interpreter->call_stack_len,
&interpreter->call_stack_size, (*word)->body[i]);
hf__parser__node_array_push(&interpreter->call_stack,
&interpreter->call_stack_len,
&interpreter->call_stack_cap, (*word)->body[i]);
interpreter->call_stack[interpreter->call_stack_len - 1].is_owner = false;
}
@@ -343,12 +343,12 @@ const hf__interpreter__word_func_t
};
void hf__interpreter__stack_push(long **arr, size_t *const len,
size_t *const size, long item) {
if (*len > *size) {
size_t *const cap, long item) {
if (*len > *cap) {
return;
} else if (*len == *size) {
*size += 1 + (*size / 2);
*arr = realloc(*arr, sizeof(long) * (*size));
} else if (*len == *cap) {
*cap += 1 + (*cap / 2);
*arr = realloc(*arr, sizeof(long) * (*cap));
}
(*arr)[*len] = item;
@@ -363,15 +363,27 @@ hf__interpreter__run(struct hf__interpreter *const interpreter) {
const struct hf__node *const top =
interpreter->call_stack + --interpreter->call_stack_len;
SET_8_VALUE_COLOUR(TXT_RED);
printf("--- type = %u ---\n", top->type);
if (top->type == HF__NODE_TYPE__WORD) {
printf("--- word name = \"%s\"\n", top->value.word.value);
}
SET_8_VALUE_COLOUR(TXT_DEFAULT);
const hf__interpreter__word_func_t func =
HF__INTERPRETER__WORD_FUNCTION[top->type];
return func ? func(interpreter, top) : HF__OK;
if (func) {
struct hf__result res = func(interpreter, top);
if (res.ok && interpreter->call_stack_cap_max != 0 &&
interpreter->call_stack_cap >= interpreter->call_stack_cap_max) {
const size_t required_size =
snprintf(NULL, 0, "%lu >= %lu", interpreter->call_stack_cap,
interpreter->call_stack_cap_max);
char *msg = malloc(sizeof(char) * (required_size + 1));
snprintf(msg, required_size + 1, "%lu >= %lu",
interpreter->call_stack_cap, interpreter->call_stack_cap_max);
return HF__ERR_CUSTOM(HF__ERROR__INTERPRETER__CALL_STACK_TOO_BIG, msg,
true);
} else {
return res;
}
} else {
return HF__OK;
}
}
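For context on when this new check can actually fire: hf__parser__node_array_push and hf__interpreter__stack_push both grow their arrays with *cap += 1 + (*cap / 2), and main.c below starts the call stack at a capacity of 10. Assuming the call stack capacity only ever changes through that growth policy, the sequence runs 10, 16, 25, 38, 58, 88, 133, 200, 301, 452, 679, 1019, 1529, so the first capacity at or above HF__INTERPRETER__CALL_STACK_CAP_MAX (1024) is 1529. A small standalone loop that reproduces the sequence:

#include <stdio.h>

int main(void) {
    size_t cap = 10;              /* initial capacity allocated in main.c */
    const size_t cap_max = 1024;  /* HF__INTERPRETER__CALL_STACK_CAP_MAX */
    while (cap < cap_max) {
        printf("%zu -> ", cap);
        cap += 1 + cap / 2;       /* growth policy from the push helpers */
    }
    printf("%zu (first capacity >= %zu)\n", cap, cap_max);
    return 0;
}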

View File

@@ -17,7 +17,7 @@ void token_array_push(struct hf__token **arr, size_t *const len,
}
void hf__lex(const char *const src, const size_t src_len,
struct hf__token **tokens, size_t *const len, size_t *const size) {
struct hf__token **tokens, size_t *const len, size_t *const cap) {
size_t i = 0;
while (i < src_len) {
if (hf__is_space_like(src[i]) || src[i] == '\n') {
@@ -28,7 +28,12 @@ void hf__lex(const char *const src, const size_t src_len,
size_t start = i;
struct hf__token token;
if (src[i] == '\'') {
while (!hf__is_space_like(src[i]) && src[i] != '\n' && i < src_len) {
i++;
}
const size_t str_len = i - start;
if (str_len == 1 && src[start] == '\'') {
const size_t char_start = start;
i++;
start = i;
@@ -45,76 +50,69 @@ void hf__lex(const char *const src, const size_t src_len,
token.location.end = i - 1;
i++;
} else {
while (!hf__is_space_like(src[i]) && src[i] != '\n' && i < src_len) {
} else if (hf__is_numeric(src[start]) ||
(src[start] == '-' && str_len > 1 &&
hf__is_numeric(src[start + 1]))) {
token.type = HF__TOKEN_TYPE__NUMBER;
token.location.start = start;
token.location.end = i - 1;
} else if (str_len == 1 && src[start] == ':') {
token.type = HF__TOKEN_TYPE__COLON;
token.location.start = start;
token.location.end = i - 1;
} else if (str_len == 1 && src[start] == ';') {
token.type = HF__TOKEN_TYPE__SEMICOLON;
token.location.start = start;
token.location.end = i - 1;
} else if (str_len == 1 && src[start] == '(' && hf__is_space_like(src[i])) {
i++;
bool got_end = false;
while (i < src_len) {
if (src[i] == ')' && hf__is_space_like(src[i - 1])) {
got_end = true;
break;
}
i++;
}
const size_t str_len = i - start;
if (hf__is_numeric(src[start]) || (src[start] == '-' && str_len > 1 &&
hf__is_numeric(src[start + 1]))) {
token.type = HF__TOKEN_TYPE__NUMBER;
token.location.start = start;
token.location.end = i - 1;
} else if (str_len == 1 && src[start] == ':') {
token.type = HF__TOKEN_TYPE__COLON;
token.location.start = start;
token.location.end = i - 1;
} else if (str_len == 1 && src[start] == ';') {
token.type = HF__TOKEN_TYPE__SEMICOLON;
token.location.start = start;
token.location.end = i - 1;
} else if (str_len == 1 && src[start] == '(' &&
hf__is_space_like(src[i])) {
if (got_end) {
token.type = HF__TOKEN_TYPE__PAREN_COMMENT;
token.location.start = start + 2;
token.location.end = i - 2;
i++;
bool got_end = false;
while (i < src_len) {
if (src[i] == ')' && hf__is_space_like(src[i - 1])) {
got_end = true;
break;
}
i++;
}
if (got_end) {
token.type = HF__TOKEN_TYPE__PAREN_COMMENT;
token.location.start = start + 2;
token.location.end = i - 2;
i++;
} else {
i = start + 1;
goto TOKEN_IS_WORD;
}
} else if (str_len == 1 && src[start] == '\\' &&
hf__is_space_like(src[i])) {
token.type = HF__TOKEN_TYPE__BACKSLASH_COMMENT;
start = ++i;
while (src[i] != '\n' && i < src_len) {
i++;
}
token.location.start = start;
token.location.end = i - 1;
} else if (str_len == 2 && strncmp(src + start, "--", 2) == 0 &&
(hf__is_space_like(src[i]) || src[i] == '\0')) {
token.type = HF__TOKEN_TYPE__DASH_COMMENT;
start = ++i;
while (src[i] != '\n' && i < src_len) {
i++;
}
token.location.start = start;
token.location.end = i - 1;
} else {
TOKEN_IS_WORD:
token.type = HF__TOKEN_TYPE__WORD;
token.location.start = start;
token.location.end = i - 1;
i = start + 1;
goto TOKEN_IS_WORD;
}
} else if (str_len == 1 && src[start] == '\\' &&
hf__is_space_like(src[i])) {
token.type = HF__TOKEN_TYPE__BACKSLASH_COMMENT;
start = ++i;
while (src[i] != '\n' && i < src_len) {
i++;
}
token.location.start = start;
token.location.end = i - 1;
} else if (str_len == 2 && strncmp(src + start, "--", 2) == 0 &&
(hf__is_space_like(src[i]) || src[i] == '\0')) {
token.type = HF__TOKEN_TYPE__DASH_COMMENT;
start = ++i;
while (src[i] != '\n' && i < src_len) {
i++;
}
token.location.start = start;
token.location.end = i - 1;
} else {
TOKEN_IS_WORD:
token.type = HF__TOKEN_TYPE__WORD;
token.location.start = start;
token.location.end = i - 1;
}
token_array_push(tokens, len, size, token);
token_array_push(tokens, len, cap, token);
}
}

View File

@@ -21,12 +21,12 @@ void hf__parser__init_keyword_map(struct hf__hashmap *const map, size_t cap) {
}
void hf__parser__node_array_push(struct hf__node **arr, size_t *const len,
size_t *const size, struct hf__node item) {
if (*len > *size) {
size_t *const cap, struct hf__node item) {
if (*len > *cap) {
return;
} else if (*len == *size) {
*size += 1 + (*size / 2);
*arr = realloc(*arr, sizeof(struct hf__node) * (*size));
} else if (*len == *cap) {
*cap += 1 + (*cap / 2);
*arr = realloc(*arr, sizeof(struct hf__node) * (*cap));
}
(*arr)[*len] = item;

View File

@@ -4,7 +4,7 @@
#include "hydroforth/hydroforth.h"
void hf__print_error(struct hf__error_wrapper *error) {
void hf__handle_error_light(struct hf__error_wrapper *error) {
SET_8_VALUE_COLOUR(TXT_RED);
printf("Error: %s", HF__ERROR_STR[error->error]);
@@ -23,4 +23,9 @@ void hf__print_error(struct hf__error_wrapper *error) {
printf("[0x%x]", error->error);
SET_8_VALUE_COLOUR(TXT_DEFAULT);
if (HF__ERROR_PANIC[error->error]) {
putchar('\n');
exit(1);
}
}

View File

@@ -107,7 +107,8 @@ struct hf__result shell(const struct arguments *const arguments) {
struct hf__interpreter interpreter = {
.call_stack = malloc(sizeof(struct hf__node) * 10),
.call_stack_len = 0,
.call_stack_size = 10,
.call_stack_cap = 10,
.call_stack_cap_max = HF__INTERPRETER__CALL_STACK_CAP_MAX,
.words =
{
@@ -148,7 +149,7 @@ struct hf__result shell(const struct arguments *const arguments) {
free(tokens);
free(input);
if (!parse_res.ok) {
hf__print_error(&parse_res.error);
hf__handle_error_light(&parse_res.error);
putchar('\n');
continue;
}
@@ -156,7 +157,7 @@ struct hf__result shell(const struct arguments *const arguments) {
for (size_t i = nodes_len - 1; i != 0 - 1; i--) {
hf__parser__node_array_push(&interpreter.call_stack,
&interpreter.call_stack_len,
&interpreter.call_stack_size, nodes[i]);
&interpreter.call_stack_cap, nodes[i]);
}
free(nodes);
@@ -166,7 +167,7 @@ struct hf__result shell(const struct arguments *const arguments) {
while (interpreter.call_stack_len != 0 && interpreter.is_running) {
struct hf__result res = hf__interpreter__run(&interpreter);
if (!res.ok) {
hf__print_error(&res.error);
hf__handle_error_light(&res.error);
putchar('\n');
}
}
@@ -229,7 +230,7 @@ int main(int argc, char *argv[]) {
}
if (!res.ok) {
hf__print_error(&res.error);
hf__handle_error_light(&res.error);
putchar('\n');
return 1;
}
@@ -256,7 +257,8 @@ int main(int argc, char *argv[]) {
struct hf__interpreter interpreter = {
.call_stack = malloc(sizeof(struct hf__node) * 10),
.call_stack_len = 0,
.call_stack_size = 10,
.call_stack_cap = 10,
.call_stack_cap_max = HF__INTERPRETER__CALL_STACK_CAP_MAX,
.words =
{
@@ -283,7 +285,7 @@ int main(int argc, char *argv[]) {
struct hf__result parse_res = hf__parse(&parser, src, tokens, tokens_len,
&nodes, &nodes_len, &nodes_size);
if (!parse_res.ok) {
hf__print_error(&parse_res.error);
hf__handle_error_light(&parse_res.error);
putchar('\n');
return 1;
}
@@ -293,7 +295,7 @@ int main(int argc, char *argv[]) {
for (size_t i = nodes_len - 1; i != 0 - 1; i--) {
hf__parser__node_array_push(&interpreter.call_stack,
&interpreter.call_stack_len,
&interpreter.call_stack_size, nodes[i]);
&interpreter.call_stack_cap, nodes[i]);
}
free(nodes);
@@ -301,7 +303,7 @@ int main(int argc, char *argv[]) {
while (interpreter.call_stack_len != 0 && interpreter.is_running) {
struct hf__result res = hf__interpreter__run(&interpreter);
if (!res.ok) {
hf__print_error(&res.error);
hf__handle_error_light(&res.error);
putchar('\n');
if (arguments.debug) {