Dominic Grimm
7490439223
All checks were successful
continuous-integration/drone/push Build is passing
390 lines
12 KiB
C
390 lines
12 KiB
C
#include "ansi_lib.h"
|
|
#include <stdbool.h>
|
|
#include <stdio.h>
|
|
#include <stdlib.h>
|
|
|
|
#include "hydroforth/hydroforth.h"
|
|
|
|
// Hashmap value destructor for dictionary entries
// (struct hf__interpreter__word); signature matches hf__hashmap__free_value_t.
void free_word_def_value(void *x) {
  struct hf__interpreter__word *word = x;

  for (size_t i = 0; i < word->body_len; i++) {
    // TODO: free nodes
    // NOTE(review): any node-owned resources in word->body[i] are currently
    // leaked here until this TODO is implemented.
  }

  free(word->body);
  free(word);
}

// Function-pointer alias passed to hf__hashmap__free when tearing down the
// word dictionary (see hf__interpreter__free).
const hf__hashmap__free_value_t hf__interpreter__word_free =
    free_word_def_value;
|
|
|
void hf__interpreter__free(struct hf__interpreter *interpreter) {
|
|
free(interpreter->call_stack);
|
|
hf__hashmap__free(&interpreter->words, hf__interpreter__word_free);
|
|
free(interpreter->stack);
|
|
}
|
|
|
|
struct hf__result words__number(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
hf__interpreter__stack_push(&interpreter->stack, &interpreter->stack_len,
|
|
&interpreter->stack_size, node->value.number);
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__char(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
hf__interpreter__stack_push(&interpreter->stack, &interpreter->stack_len,
|
|
&interpreter->stack_size, node->value.ch);
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
// WORD: look up a user-defined word by its precomputed hash and schedule its
// body for execution by pushing the body nodes onto the call stack.
struct hf__result words__word(struct hf__interpreter *const interpreter,
                              const struct hf__node *const node) {
  // printf("word = \"%s\"\n", node->value.word.value);

  // Dictionary lookup; NULL means the word was never defined.
  const struct hf__interpreter__word *const *const word =
      (const struct hf__interpreter__word *const *const)hf__hashmap__get(
          &interpreter->words, node->value.word.hash);
  if (word == NULL) {
    // Quote the word name for the error message; the final `true` transfers
    // ownership of msg to the error.
    char *msg = hf__quote(node->value.word.value, true);
    // NOTE(review): unlike the success path below, this free is not gated on
    // node->is_owner — confirm the error path always owns the string.
    free(node->value.word.value);

    return HF__ERR_CUSTOM(HF__ERROR__INTERPRETER__UNKNOWN_WORD, msg, true);
  }

  // Push the body in reverse so nodes pop off the call stack in source order.
  // The unsigned index deliberately relies on wraparound: `0 - 1` is
  // SIZE_MAX, which doubles as the loop sentinel (an empty body terminates
  // immediately for the same reason).
  for (size_t i = (*word)->body_len - 1; i != 0 - 1; i--) {
    hf__parser__node_array_push(&interpreter->call_stack,
                                &interpreter->call_stack_len,
                                &interpreter->call_stack_cap, (*word)->body[i]);
    // The dictionary keeps ownership of the nodes; the call stack only holds
    // non-owning copies.
    interpreter->call_stack[interpreter->call_stack_len - 1].is_owner = false;
  }

  // Release the word-name string only when this node owns it.
  if (node->is_owner) {
    free(node->value.word.value);
  }

  return HF__OK;
}
|
|
|
|
// Word definition (`: name ... ;`): register a new entry in the dictionary,
// taking ownership of the definition's body nodes.
struct hf__result words__word_def(struct hf__interpreter *const interpreter,
                                  const struct hf__node *const node) {
  // Reject redefinition: the name hash must not already be present.
  if (hf__hashmap__get(&interpreter->words, node->value.word_def->name.hash)) {
    char *msg = hf__quote(node->value.word_def->name.value, true);

    // Free the rejected definition before reporting; msg ownership is
    // transferred to the error via the final `true`.
    free(node->value.word_def->name.value);
    free(node->value.word_def->body);
    free(node->value.word_def);

    return HF__ERR_CUSTOM(HF__ERROR__INTERPRETER__WORD_ALREADY_DEF, msg, true);
  }

  // NOTE(review): malloc result is unchecked — a failed allocation would
  // dereference NULL on the next line.
  struct hf__interpreter__word *word =
      malloc(sizeof(struct hf__interpreter__word));
  // The dictionary entry takes ownership of the body pointer.
  (*word) = (struct hf__interpreter__word){
      .body = node->value.word_def->body,
      .body_len = node->value.word_def->body_len,
  };

  hf__hashmap__insert(&interpreter->words, node->value.word_def->name.hash,
                      word);

  // The name string and the wrapper struct are no longer needed; the body
  // pointer now lives in `word` and must NOT be freed here.
  free(node->value.word_def->name.value);
  free(node->value.word_def);

  return HF__OK;
}
|
|
|
|
struct hf__result words__comment(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
// printf("comment = \"%s\"\n", node->value.comment);
|
|
|
|
free(node->value.comment);
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__dup(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
if (interpreter->stack_len < 1) {
|
|
return (struct hf__result){
|
|
.ok = false,
|
|
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
|
};
|
|
}
|
|
|
|
const long top = interpreter->stack[interpreter->stack_len - 1];
|
|
hf__interpreter__stack_push(&interpreter->stack, &interpreter->stack_len,
|
|
&interpreter->stack_size, top);
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__drop(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
if (interpreter->stack_len < 1) {
|
|
return (struct hf__result){
|
|
.ok = false,
|
|
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
|
};
|
|
}
|
|
|
|
interpreter->stack_len--;
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__swap(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
if (interpreter->stack_len < 2) {
|
|
return (struct hf__result){
|
|
.ok = false,
|
|
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
|
};
|
|
}
|
|
|
|
const long tmp = interpreter->stack[interpreter->stack_len - 1];
|
|
interpreter->stack[interpreter->stack_len - 1] =
|
|
interpreter->stack[interpreter->stack_len - 2];
|
|
interpreter->stack[interpreter->stack_len - 2] = tmp;
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__over(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
if (interpreter->stack_len < 2) {
|
|
return (struct hf__result){
|
|
.ok = false,
|
|
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
|
};
|
|
}
|
|
|
|
hf__interpreter__stack_push(&interpreter->stack, &interpreter->stack_len,
|
|
&interpreter->stack_size,
|
|
interpreter->stack[interpreter->stack_len - 2]);
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__rot(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
if (interpreter->stack_len < 1) {
|
|
return (struct hf__result){
|
|
.ok = false,
|
|
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
|
};
|
|
}
|
|
|
|
interpreter->stack_len--;
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__add(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
if (interpreter->stack_len < 2) {
|
|
return (struct hf__result){
|
|
.ok = false,
|
|
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
|
};
|
|
}
|
|
|
|
interpreter->stack[interpreter->stack_len - 2] +=
|
|
interpreter->stack[interpreter->stack_len - 1];
|
|
interpreter->stack_len--;
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__sub(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
if (interpreter->stack_len < 2) {
|
|
return (struct hf__result){
|
|
.ok = false,
|
|
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
|
};
|
|
}
|
|
|
|
interpreter->stack[interpreter->stack_len - 2] -=
|
|
interpreter->stack[interpreter->stack_len - 1];
|
|
interpreter->stack_len--;
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__dot(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
if (interpreter->stack_len < 1) {
|
|
return HF__ERR(HF__ERROR__INTERPRETER__STACK_UNDERFLOW);
|
|
}
|
|
|
|
printf("%li", interpreter->stack[--interpreter->stack_len]);
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__emit(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
if (interpreter->stack_len < 1) {
|
|
return HF__ERR(HF__ERROR__INTERPRETER__STACK_UNDERFLOW);
|
|
}
|
|
|
|
putchar(interpreter->stack[--interpreter->stack_len]);
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__space(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
putchar(' ');
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__spaces(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
if (interpreter->stack_len < 1) {
|
|
return HF__ERR(HF__ERROR__INTERPRETER__STACK_UNDERFLOW);
|
|
}
|
|
|
|
const unsigned long max = interpreter->stack[--interpreter->stack_len];
|
|
for (unsigned long i = 0; i < max; i++) {
|
|
putchar(' ');
|
|
}
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__cr(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
putchar('\n');
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__crs(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
if (interpreter->stack_len < 1) {
|
|
return HF__ERR(HF__ERROR__INTERPRETER__STACK_UNDERFLOW);
|
|
}
|
|
|
|
const unsigned long max = interpreter->stack[--interpreter->stack_len];
|
|
for (unsigned long i = 0; i < max; i++) {
|
|
putchar('\n');
|
|
}
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__debug(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
SET_8_VALUE_COLOUR(TXT_CYAN);
|
|
puts("\n===\nDEBUG:");
|
|
for (size_t i = interpreter->stack_len - 1; i != 0 - 1; i--) {
|
|
printf("%lu : %li\n", i, interpreter->stack[i]);
|
|
}
|
|
puts("===");
|
|
SET_8_VALUE_COLOUR(TXT_DEFAULT);
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__exit(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
if (interpreter->stack_len < 1) {
|
|
return HF__ERR(HF__ERROR__INTERPRETER__STACK_UNDERFLOW);
|
|
}
|
|
|
|
interpreter->is_running = false;
|
|
interpreter->exit_code = interpreter->stack[--interpreter->stack_len];
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
struct hf__result words__abort(struct hf__interpreter *const interpreter,
|
|
const struct hf__node *const node) {
|
|
abort();
|
|
|
|
return HF__OK;
|
|
}
|
|
|
|
// Dispatch table: maps each node type to its handler function. Node types
// without an entry are zero-initialized to NULL and silently skipped by
// hf__interpreter__run.
const hf__interpreter__word_func_t
    HF__INTERPRETER__WORD_FUNCTION[__HF__NODE_TYPE__N] = {
        // literals
        [HF__NODE_TYPE__NUMBER] = words__number,
        [HF__NODE_TYPE__CHAR] = words__char,
        [HF__NODE_TYPE__WORD] = words__word,
        [HF__NODE_TYPE__WORD_DEF] = words__word_def,

        // both comment styles share one handler
        [HF__NODE_TYPE__DASH_COMMENT] = words__comment,
        [HF__NODE_TYPE__PAREN_COMMENT] = words__comment,

        // stack manipulation
        [HF__NODE_TYPE__DUP] = words__dup,
        [HF__NODE_TYPE__DROP] = words__drop,
        [HF__NODE_TYPE__SWAP] = words__swap,
        [HF__NODE_TYPE__OVER] = words__over,
        [HF__NODE_TYPE__ROT] = words__rot,

        // arithmetic
        [HF__NODE_TYPE__ADD] = words__add,
        [HF__NODE_TYPE__SUB] = words__sub,

        // output
        [HF__NODE_TYPE__DOT] = words__dot,
        [HF__NODE_TYPE__EMIT] = words__emit,
        [HF__NODE_TYPE__SPACE] = words__space,
        [HF__NODE_TYPE__SPACES] = words__spaces,
        [HF__NODE_TYPE__CR] = words__cr,
        [HF__NODE_TYPE__CRS] = words__crs,
        [HF__NODE_TYPE__DEBUG] = words__debug,

        // process control
        [HF__NODE_TYPE__ABORT] = words__abort,
        [HF__NODE_TYPE__EXIT] = words__exit,
};
|
|
|
|
// Append `item` to a growable long array, growing capacity by ~1.5x when
// full. On an inconsistent state (*len > *cap) or allocation failure the
// push is silently dropped and the existing buffer is left intact.
//
// BUG FIX: the previous version did `*arr = realloc(*arr, ...)` without
// checking the result — on allocation failure that overwrites the caller's
// pointer with NULL (leaking the old buffer) and then writes through it
// (undefined behavior). The result is now checked before committing.
void hf__interpreter__stack_push(long **arr, size_t *const len,
                                 size_t *const cap, long item) {
  if (*len > *cap) {
    return;
  }
  if (*len == *cap) {
    // Growth factor 1.5 (+1 so a zero capacity still grows).
    const size_t new_cap = *cap + 1 + (*cap / 2);
    long *grown = realloc(*arr, sizeof(long) * new_cap);
    if (grown == NULL) {
      // Old buffer is still valid; drop the push rather than corrupt state.
      return;
    }
    *arr = grown;
    *cap = new_cap;
  }

  (*arr)[*len] = item;
  (*len)++;
}
|
|
|
|
// Execute a single interpreter step: pop the top node from the call stack
// and dispatch it through HF__INTERPRETER__WORD_FUNCTION. Returns HF__OK
// when the call stack is empty or the node type has no handler.
struct hf__result
hf__interpreter__run(struct hf__interpreter *const interpreter) {
  if (interpreter->call_stack_len == 0) {
    return HF__OK;
  }

  // Pop by decrementing the length; the node's storage remains valid until
  // the next push into the call stack.
  const struct hf__node *const top =
      interpreter->call_stack + --interpreter->call_stack_len;
  const hf__interpreter__word_func_t func =
      HF__INTERPRETER__WORD_FUNCTION[top->type];

  if (func) {
    struct hf__result res = func(interpreter, top);

    // Guard against runaway call-stack growth. A call_stack_cap_max of 0
    // disables the limit.
    if (res.ok && interpreter->call_stack_cap_max != 0 &&
        interpreter->call_stack_cap >= interpreter->call_stack_cap_max) {
      // Size-then-format: first snprintf measures, second one writes.
      // Ownership of msg transfers to the error (final `true` argument).
      // NOTE(review): snprintf returns int; a negative return would wrap
      // when stored into size_t. Also "%lu" assumes these cap fields are
      // unsigned long — confirm against the header ("%zu" if size_t).
      const size_t required_size =
          snprintf(NULL, 0, "%lu >= %lu", interpreter->call_stack_cap,
                   interpreter->call_stack_cap_max);
      char *msg = malloc(sizeof(char) * (required_size + 1));
      snprintf(msg, required_size + 1, "%lu >= %lu",
               interpreter->call_stack_cap, interpreter->call_stack_cap_max);

      return HF__ERR_CUSTOM(HF__ERROR__INTERPRETER__CALL_STACK_TOO_BIG, msg,
                            true);
    } else {
      return res;
    }
  } else {
    // Unhandled node type: ignore and report success.
    return HF__OK;
  }
}
|