Add error call stack if call stack exceeds maximum capacity
All checks were successful: continuous-integration/drone/push build is passing

Dominic Grimm 2023-08-05 20:19:52 +02:00
parent 411665061d
commit 7490439223
9 changed files with 138 additions and 102 deletions
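The guard this commit introduces lives in hf__interpreter__run (see the interpreter.c hunks below): after a word executes, the interpreter compares the call stack's capacity against the new call_stack_cap_max field and returns an error instead of letting the stack grow without bound. A minimal standalone sketch of that idea, with simplified names and a deliberately tiny cap so the error path actually fires; it assumes nothing about the project beyond what the diff shows.

#include <stdio.h>
#include <stdlib.h>

#define CALL_STACK_CAP_MAX 8 /* stand-in for HF__INTERPRETER__CALL_STACK_CAP_MAX (1024) */

/* Same growth rule as hf__interpreter__stack_push in the diff below:
 * grow by roughly 1.5x whenever the length reaches the capacity. */
static void stack_push(long **arr, size_t *len, size_t *cap, long item) {
  if (*len == *cap) {
    *cap += 1 + (*cap / 2);
    *arr = realloc(*arr, sizeof(long) * (*cap));
  }
  (*arr)[(*len)++] = item;
}

int main(void) {
  size_t len = 0, cap = 2, cap_max = CALL_STACK_CAP_MAX;
  long *stack = malloc(sizeof(long) * cap);

  for (long i = 0; i < 32; i++) {
    stack_push(&stack, &len, &cap, i);
    /* Mirrors the new guard in hf__interpreter__run: once the capacity
     * reaches the configured maximum (0 meaning "no limit"), report an
     * error instead of growing further. */
    if (cap_max != 0 && cap >= cap_max) {
      fprintf(stderr, "Error: Call stack is too big (%zu >= %zu)\n", cap,
              cap_max);
      break;
    }
  }

  free(stack);
  return 0;
}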

View file

@@ -17,10 +17,13 @@ struct hf__interpreter__word {
 extern const hf__hashmap__free_value_t hf__interpreter__word_free;
 
+#define HF__INTERPRETER__CALL_STACK_CAP_MAX 1024
+
 struct hf__interpreter {
   struct hf__node *call_stack;
   size_t call_stack_len;
-  size_t call_stack_size;
+  size_t call_stack_cap;
+  size_t call_stack_cap_max;
 
   struct hf__hashmap words; // struct hf__interpreter__word *
@@ -41,7 +44,7 @@ extern const hf__interpreter__word_func_t
     HF__INTERPRETER__WORD_FUNCTION[__HF__NODE_TYPE__N];
 
 extern void hf__interpreter__stack_push(long **arr, size_t *const len,
-                                        size_t *const size, long item);
+                                        size_t *const cap, long item);
 
 extern struct hf__result
 hf__interpreter__run(struct hf__interpreter *const interpreter);

View file

@@ -24,6 +24,6 @@ struct hf__token {
 extern void hf__lex(const char *const src, const size_t src_len,
                     struct hf__token **tokens, size_t *const len,
-                    size_t *const size);
+                    size_t *const cap);
 
 #endif

View file

@@ -74,12 +74,12 @@ extern void hf__parser__init_keyword_map(struct hf__hashmap *const map,
                                          size_t cap);
 
 extern void hf__parser__node_array_push(struct hf__node **arr,
-                                        size_t *const len, size_t *const size,
+                                        size_t *const len, size_t *const cap,
                                         struct hf__node item);
 
 extern struct hf__result
 hf__parse(struct hf__parser *const parser, const char *const src,
           const struct hf__token *const tokens, const size_t tokens_len,
-          struct hf__node **nodes, size_t *const len, size_t *const size);
+          struct hf__node **nodes, size_t *const len, size_t *const cap);
 
 #endif

View file

@@ -9,6 +9,7 @@ enum hf__error {
   HF__ERROR__PARSER__WORD_DEF_INVALID_NAME,
   HF__ERROR__PARSER__WORD_DEF_IS_KEYWORD,
+  HF__ERROR__INTERPRETER__CALL_STACK_TOO_BIG,
   HF__ERROR__INTERPRETER__UNKNOWN_WORD,
   HF__ERROR__INTERPRETER__WORD_ALREADY_DEF,
   HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
@@ -31,12 +32,27 @@ static const char *const HF__ERROR_STR[__HF__ERROR__N] = {
        "Invalid token type for word name",
    [HF__ERROR__PARSER__WORD_DEF_IS_KEYWORD] = "Word name is already a keyword",
+   [HF__ERROR__INTERPRETER__CALL_STACK_TOO_BIG] = "Call stack is too big",
    [HF__ERROR__INTERPRETER__UNKNOWN_WORD] = "Unknown word",
    [HF__ERROR__INTERPRETER__WORD_ALREADY_DEF] = "Word is already defined",
    [HF__ERROR__INTERPRETER__STACK_UNDERFLOW] = "Stack underflow",
 };
 
-extern void hf__print_error(struct hf__error_wrapper *error);
+static const bool HF__ERROR_PANIC[__HF__ERROR__N] = {
+    [HF__ERROR__PARSER__UNEXPECTED] = false,
+    [HF__ERROR__PARSER__INVALID_NUMBER] = false,
+    [HF__ERROR__PARSER__INVALID_CHAR] = false,
+    [HF__ERROR__PARSER__WORD_DEF_INCOMPLETE] = false,
+    [HF__ERROR__PARSER__WORD_DEF_INVALID_NAME] = false,
+    [HF__ERROR__PARSER__WORD_DEF_IS_KEYWORD] = false,
+    [HF__ERROR__INTERPRETER__CALL_STACK_TOO_BIG] = true,
+    [HF__ERROR__INTERPRETER__UNKNOWN_WORD] = false,
+    [HF__ERROR__INTERPRETER__WORD_ALREADY_DEF] = false,
+    [HF__ERROR__INTERPRETER__STACK_UNDERFLOW] = false,
+};
+
+extern void hf__handle_error_light(struct hf__error_wrapper *error);
 
 struct hf__result {
   bool ok;

View file

@@ -56,9 +56,9 @@ struct hf__result words__word(struct hf__interpreter *const interpreter,
   }
 
   for (size_t i = (*word)->body_len - 1; i != 0 - 1; i--) {
-    hf__parser__node_array_push(
-        &interpreter->call_stack, &interpreter->call_stack_len,
-        &interpreter->call_stack_size, (*word)->body[i]);
+    hf__parser__node_array_push(&interpreter->call_stack,
+                                &interpreter->call_stack_len,
+                                &interpreter->call_stack_cap, (*word)->body[i]);
     interpreter->call_stack[interpreter->call_stack_len - 1].is_owner = false;
   }
@@ -343,12 +343,12 @@ const hf__interpreter__word_func_t
 };
 
 void hf__interpreter__stack_push(long **arr, size_t *const len,
-                                 size_t *const size, long item) {
-  if (*len > *size) {
+                                 size_t *const cap, long item) {
+  if (*len > *cap) {
     return;
-  } else if (*len == *size) {
-    *size += 1 + (*size / 2);
-    *arr = realloc(*arr, sizeof(long) * (*size));
+  } else if (*len == *cap) {
+    *cap += 1 + (*cap / 2);
+    *arr = realloc(*arr, sizeof(long) * (*cap));
   }
 
   (*arr)[*len] = item;
@@ -363,15 +363,27 @@ hf__interpreter__run(struct hf__interpreter *const interpreter) {
   const struct hf__node *const top =
       interpreter->call_stack + --interpreter->call_stack_len;
 
-  SET_8_VALUE_COLOUR(TXT_RED);
-  printf("--- type = %u ---\n", top->type);
-  if (top->type == HF__NODE_TYPE__WORD) {
-    printf("--- word name = \"%s\"\n", top->value.word.value);
-  }
-  SET_8_VALUE_COLOUR(TXT_DEFAULT);
-
   const hf__interpreter__word_func_t func =
       HF__INTERPRETER__WORD_FUNCTION[top->type];
-  return func ? func(interpreter, top) : HF__OK;
+  if (func) {
+    struct hf__result res = func(interpreter, top);
+    if (res.ok && interpreter->call_stack_cap_max != 0 &&
+        interpreter->call_stack_cap >= interpreter->call_stack_cap_max) {
+      const size_t required_size =
+          snprintf(NULL, 0, "%lu >= %lu", interpreter->call_stack_cap,
+                   interpreter->call_stack_cap_max);
+      char *msg = malloc(sizeof(char) * (required_size + 1));
+      snprintf(msg, required_size + 1, "%lu >= %lu",
+               interpreter->call_stack_cap, interpreter->call_stack_cap_max);
+      return HF__ERR_CUSTOM(HF__ERROR__INTERPRETER__CALL_STACK_TOO_BIG, msg,
+                            true);
+    } else {
+      return res;
+    }
+  } else {
+    return HF__OK;
+  }
 }
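One detail worth noting in the error path above: the message is sized with a preflight snprintf(NULL, 0, ...) call, which returns the number of characters the formatted string needs, so the buffer can be allocated exactly. A small standalone sketch of that pattern follows; the helper name is illustrative, and %zu is used here as the portable size_t conversion where the diff uses %lu.

#include <stdio.h>
#include <stdlib.h>

/* Measure the formatted length first, then allocate and format for real. */
static char *format_cap_error(size_t cap, size_t cap_max) {
  const int required_size = snprintf(NULL, 0, "%zu >= %zu", cap, cap_max);
  char *msg = malloc((size_t)required_size + 1); /* +1 for the terminator */
  snprintf(msg, (size_t)required_size + 1, "%zu >= %zu", cap, cap_max);
  return msg; /* caller owns the buffer */
}

int main(void) {
  char *msg = format_cap_error(1536, 1024);
  printf("Call stack is too big: %s\n", msg);
  free(msg);
  return 0;
}

In the diff, the formatted string is handed to HF__ERR_CUSTOM with its last argument set to true, presumably marking the message as heap-allocated so it can be released along with the error.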

View file

@@ -17,7 +17,7 @@ void token_array_push(struct hf__token **arr, size_t *const len,
 }
 
 void hf__lex(const char *const src, const size_t src_len,
-             struct hf__token **tokens, size_t *const len, size_t *const size) {
+             struct hf__token **tokens, size_t *const len, size_t *const cap) {
   size_t i = 0;
   while (i < src_len) {
     if (hf__is_space_like(src[i]) || src[i] == '\n') {
@@ -28,7 +28,12 @@ void hf__lex(const char *const src, const size_t src_len,
     size_t start = i;
     struct hf__token token;
 
-    if (src[i] == '\'') {
+    while (!hf__is_space_like(src[i]) && src[i] != '\n' && i < src_len) {
+      i++;
+    }
+    const size_t str_len = i - start;
+
+    if (str_len == 1 && src[start] == '\'') {
       const size_t char_start = start;
       i++;
       start = i;
@@ -45,76 +50,69 @@ void hf__lex(const char *const src, const size_t src_len,
       token.location.end = i - 1;
       i++;
-    } else {
-      while (!hf__is_space_like(src[i]) && src[i] != '\n' && i < src_len) {
-        i++;
-      }
-      const size_t str_len = i - start;
-      if (hf__is_numeric(src[start]) || (src[start] == '-' && str_len > 1 &&
-                                         hf__is_numeric(src[start + 1]))) {
-        token.type = HF__TOKEN_TYPE__NUMBER;
-        token.location.start = start;
-        token.location.end = i - 1;
-      } else if (str_len == 1 && src[start] == ':') {
-        token.type = HF__TOKEN_TYPE__COLON;
-        token.location.start = start;
-        token.location.end = i - 1;
-      } else if (str_len == 1 && src[start] == ';') {
-        token.type = HF__TOKEN_TYPE__SEMICOLON;
-        token.location.start = start;
-        token.location.end = i - 1;
-      } else if (str_len == 1 && src[start] == '(' &&
-                 hf__is_space_like(src[i])) {
-        i++;
-        bool got_end = false;
-        while (i < src_len) {
-          if (src[i] == ')' && hf__is_space_like(src[i - 1])) {
-            got_end = true;
-            break;
-          }
-          i++;
-        }
-        if (got_end) {
-          token.type = HF__TOKEN_TYPE__PAREN_COMMENT;
-          token.location.start = start + 2;
-          token.location.end = i - 2;
-          i++;
-        } else {
-          i = start + 1;
-          goto TOKEN_IS_WORD;
-        }
-      } else if (str_len == 1 && src[start] == '\\' &&
-                 hf__is_space_like(src[i])) {
-        token.type = HF__TOKEN_TYPE__BACKSLASH_COMMENT;
-        start = ++i;
-        while (src[i] != '\n' && i < src_len) {
-          i++;
-        }
-        token.location.start = start;
-        token.location.end = i - 1;
-      } else if (str_len == 2 && strncmp(src + start, "--", 2) == 0 &&
-                 (hf__is_space_like(src[i]) || src[i] == '\0')) {
-        token.type = HF__TOKEN_TYPE__DASH_COMMENT;
-        start = ++i;
-        while (src[i] != '\n' && i < src_len) {
-          i++;
-        }
-        token.location.start = start;
-        token.location.end = i - 1;
-      } else {
-      TOKEN_IS_WORD:
-        token.type = HF__TOKEN_TYPE__WORD;
-        token.location.start = start;
-        token.location.end = i - 1;
-      }
-    }
-    token_array_push(tokens, len, size, token);
+    } else if (hf__is_numeric(src[start]) ||
+               (src[start] == '-' && str_len > 1 &&
+                hf__is_numeric(src[start + 1]))) {
+      token.type = HF__TOKEN_TYPE__NUMBER;
+      token.location.start = start;
+      token.location.end = i - 1;
+    } else if (str_len == 1 && src[start] == ':') {
+      token.type = HF__TOKEN_TYPE__COLON;
+      token.location.start = start;
+      token.location.end = i - 1;
+    } else if (str_len == 1 && src[start] == ';') {
+      token.type = HF__TOKEN_TYPE__SEMICOLON;
+      token.location.start = start;
+      token.location.end = i - 1;
+    } else if (str_len == 1 && src[start] == '(' && hf__is_space_like(src[i])) {
+      i++;
+      bool got_end = false;
+      while (i < src_len) {
+        if (src[i] == ')' && hf__is_space_like(src[i - 1])) {
+          got_end = true;
+          break;
+        }
+        i++;
+      }
+      if (got_end) {
+        token.type = HF__TOKEN_TYPE__PAREN_COMMENT;
+        token.location.start = start + 2;
+        token.location.end = i - 2;
+        i++;
+      } else {
+        i = start + 1;
+        goto TOKEN_IS_WORD;
+      }
+    } else if (str_len == 1 && src[start] == '\\' &&
+               hf__is_space_like(src[i])) {
+      token.type = HF__TOKEN_TYPE__BACKSLASH_COMMENT;
+      start = ++i;
+      while (src[i] != '\n' && i < src_len) {
+        i++;
+      }
+      token.location.start = start;
+      token.location.end = i - 1;
+    } else if (str_len == 2 && strncmp(src + start, "--", 2) == 0 &&
+               (hf__is_space_like(src[i]) || src[i] == '\0')) {
+      token.type = HF__TOKEN_TYPE__DASH_COMMENT;
+      start = ++i;
+      while (src[i] != '\n' && i < src_len) {
+        i++;
+      }
+      token.location.start = start;
+      token.location.end = i - 1;
+    } else {
+    TOKEN_IS_WORD:
+      token.type = HF__TOKEN_TYPE__WORD;
+      token.location.start = start;
+      token.location.end = i - 1;
+    }
+
+    token_array_push(tokens, len, cap, token);
   }
 }
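The restructuring above changes the scanning order: hf__lex now consumes a whole whitespace-delimited chunk first, records its length as str_len, and only then classifies it, instead of branching on the first character and scanning inside each branch. Below is a standalone sketch of that chunk-then-classify order on a small input; it is deliberately simplified (comment bodies and character literals are not consumed, and the classifier is not the project's API), just to show the shape of the new flow.

#include <ctype.h>
#include <stdio.h>
#include <string.h>

/* Classify a whitespace-delimited chunk by its length and leading
 * characters, roughly the way the reworked hf__lex body does. */
static const char *classify(const char *chunk, size_t len) {
  if (len == 1 && chunk[0] == '\'')
    return "CHAR";
  if (isdigit((unsigned char)chunk[0]) ||
      (chunk[0] == '-' && len > 1 && isdigit((unsigned char)chunk[1])))
    return "NUMBER";
  if (len == 1 && chunk[0] == ':')
    return "COLON";
  if (len == 1 && chunk[0] == ';')
    return "SEMICOLON";
  if (len == 2 && strncmp(chunk, "--", 2) == 0)
    return "DASH_COMMENT";
  return "WORD";
}

int main(void) {
  const char *src = ": square dup * ; 4 square . -- prints 16";
  const char *p = src;
  while (*p) {
    while (*p == ' ')
      p++; /* skip separators */
    const char *start = p;
    while (*p && *p != ' ')
      p++; /* consume one chunk */
    if (p > start)
      printf("%-12s %.*s\n", classify(start, (size_t)(p - start)),
             (int)(p - start), start);
  }
  return 0;
}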

View file

@@ -21,12 +21,12 @@ void hf__parser__init_keyword_map(struct hf__hashmap *const map, size_t cap) {
 }
 
 void hf__parser__node_array_push(struct hf__node **arr, size_t *const len,
-                                 size_t *const size, struct hf__node item) {
-  if (*len > *size) {
+                                 size_t *const cap, struct hf__node item) {
+  if (*len > *cap) {
     return;
-  } else if (*len == *size) {
-    *size += 1 + (*size / 2);
-    *arr = realloc(*arr, sizeof(struct hf__node) * (*size));
+  } else if (*len == *cap) {
+    *cap += 1 + (*cap / 2);
+    *arr = realloc(*arr, sizeof(struct hf__node) * (*cap));
   }
 
   (*arr)[*len] = item;
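hf__parser__node_array_push uses the same growth rule as the interpreter's stack push: cap += 1 + cap / 2, roughly 1.5x per step. Starting from the initial capacity of 10 that main.c passes in, the short sketch below just prints that progression and counts the growth steps until the capacity first reaches the new 1024 maximum, i.e. the point at which the guard in hf__interpreter__run would trigger.

#include <stdio.h>

int main(void) {
  size_t cap = 10;               /* initial capacity used in main.c */
  const size_t cap_max = 1024;   /* HF__INTERPRETER__CALL_STACK_CAP_MAX */
  int steps = 0;
  while (cap < cap_max) {
    cap += 1 + cap / 2; /* the shared growth rule */
    steps++;
    printf("step %2d: cap = %zu\n", steps, cap);
  }
  printf("capacity reaches the maximum after %d growth steps\n", steps);
  return 0;
}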

View file

@@ -4,7 +4,7 @@
 #include "hydroforth/hydroforth.h"
 
-void hf__print_error(struct hf__error_wrapper *error) {
+void hf__handle_error_light(struct hf__error_wrapper *error) {
   SET_8_VALUE_COLOUR(TXT_RED);
   printf("Error: %s", HF__ERROR_STR[error->error]);
@@ -23,4 +23,9 @@ void hf__print_error(struct hf__error_wrapper *error) {
   printf("[0x%x]", error->error);
   SET_8_VALUE_COLOUR(TXT_DEFAULT);
+
+  if (HF__ERROR_PANIC[error->error]) {
+    putchar('\n');
+    exit(1);
+  }
 }
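With the new HF__ERROR_PANIC table, hf__handle_error_light keeps the old printing behaviour but additionally calls exit(1) for errors marked as fatal, which in this commit is only the call-stack-too-big case. A standalone sketch of that lookup-table pattern, with simplified names that are not the project's actual enum or API:

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

enum error { ERR_UNKNOWN_WORD, ERR_STACK_UNDERFLOW, ERR_CALL_STACK_TOO_BIG, ERR_COUNT };

static const char *const ERROR_STR[ERR_COUNT] = {
    [ERR_UNKNOWN_WORD] = "Unknown word",
    [ERR_STACK_UNDERFLOW] = "Stack underflow",
    [ERR_CALL_STACK_TOO_BIG] = "Call stack is too big",
};

/* Parallel table marking which errors are fatal. */
static const bool ERROR_PANIC[ERR_COUNT] = {
    [ERR_UNKNOWN_WORD] = false,
    [ERR_STACK_UNDERFLOW] = false,
    [ERR_CALL_STACK_TOO_BIG] = true, /* the only fatal one, as in the diff */
};

static void handle_error_light(enum error err) {
  fprintf(stderr, "Error: %s [0x%x]\n", ERROR_STR[err], (unsigned)err);
  if (ERROR_PANIC[err]) {
    exit(1); /* fatal errors abort instead of letting the REPL continue */
  }
}

int main(void) {
  handle_error_light(ERR_STACK_UNDERFLOW);    /* printed, execution continues */
  handle_error_light(ERR_CALL_STACK_TOO_BIG); /* printed, then exit(1) */
  return 0;
}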

View file

@@ -107,7 +107,8 @@ struct hf__result shell(const struct arguments *const arguments) {
   struct hf__interpreter interpreter = {
       .call_stack = malloc(sizeof(struct hf__node) * 10),
       .call_stack_len = 0,
-      .call_stack_size = 10,
+      .call_stack_cap = 10,
+      .call_stack_cap_max = HF__INTERPRETER__CALL_STACK_CAP_MAX,
       .words =
           {
@@ -148,7 +149,7 @@ struct hf__result shell(const struct arguments *const arguments) {
     free(tokens);
     free(input);
     if (!parse_res.ok) {
-      hf__print_error(&parse_res.error);
+      hf__handle_error_light(&parse_res.error);
       putchar('\n');
       continue;
     }
@@ -156,7 +157,7 @@ struct hf__result shell(const struct arguments *const arguments) {
     for (size_t i = nodes_len - 1; i != 0 - 1; i--) {
       hf__parser__node_array_push(&interpreter.call_stack,
                                   &interpreter.call_stack_len,
-                                  &interpreter.call_stack_size, nodes[i]);
+                                  &interpreter.call_stack_cap, nodes[i]);
     }
     free(nodes);
@@ -166,7 +167,7 @@ struct hf__result shell(const struct arguments *const arguments) {
     while (interpreter.call_stack_len != 0 && interpreter.is_running) {
       struct hf__result res = hf__interpreter__run(&interpreter);
       if (!res.ok) {
-        hf__print_error(&res.error);
+        hf__handle_error_light(&res.error);
        putchar('\n');
       }
     }
@@ -229,7 +230,7 @@ int main(int argc, char *argv[]) {
   }
 
   if (!res.ok) {
-    hf__print_error(&res.error);
+    hf__handle_error_light(&res.error);
     putchar('\n');
     return 1;
   }
@@ -256,7 +257,8 @@ int main(int argc, char *argv[]) {
   struct hf__interpreter interpreter = {
       .call_stack = malloc(sizeof(struct hf__node) * 10),
       .call_stack_len = 0,
-      .call_stack_size = 10,
+      .call_stack_cap = 10,
+      .call_stack_cap_max = HF__INTERPRETER__CALL_STACK_CAP_MAX,
       .words =
           {
@@ -283,7 +285,7 @@ int main(int argc, char *argv[]) {
   struct hf__result parse_res = hf__parse(&parser, src, tokens, tokens_len,
                                           &nodes, &nodes_len, &nodes_size);
   if (!parse_res.ok) {
-    hf__print_error(&parse_res.error);
+    hf__handle_error_light(&parse_res.error);
     putchar('\n');
     return 1;
   }
@@ -293,7 +295,7 @@ int main(int argc, char *argv[]) {
   for (size_t i = nodes_len - 1; i != 0 - 1; i--) {
     hf__parser__node_array_push(&interpreter.call_stack,
                                 &interpreter.call_stack_len,
-                                &interpreter.call_stack_size, nodes[i]);
+                                &interpreter.call_stack_cap, nodes[i]);
   }
   free(nodes);
@@ -301,7 +303,7 @@ int main(int argc, char *argv[]) {
   while (interpreter.call_stack_len != 0 && interpreter.is_running) {
     struct hf__result res = hf__interpreter__run(&interpreter);
     if (!res.ok) {
-      hf__print_error(&res.error);
+      hf__handle_error_light(&res.error);
       putchar('\n');
       if (arguments.debug) {