Complete rewrite
parent 115dc530e1
commit cd77044bf5
24 changed files with 4650 additions and 836 deletions
src/hydroforth/hash.c (new file, +24)
@@ -0,0 +1,24 @@
#include "hydroforth/hydroforth.h"
|
||||
|
||||
// based on http://www.cse.yorku.ca/~oz/hash.html
|
||||
hf__hash_t hf__hash_mem(const unsigned char *start,
|
||||
const unsigned char *const end) {
|
||||
hf__hash_t hash = 5381;
|
||||
int c;
|
||||
|
||||
while ((c = *start++) && start <= (end + 1)) {
|
||||
hash = ((hash << 5) + hash) + c;
|
||||
}
|
||||
|
||||
return hash;
|
||||
}
|
||||
|
||||
hf__hash_t hf__hash_str(const char *str) {
|
||||
hf__hash_t hash = 5381;
|
||||
int c;
|
||||
|
||||
while (c = *str++)
|
||||
hash = ((hash << 5) + hash) + c; /* hash * 33 + c */
|
||||
|
||||
return hash;
|
||||
}
src/hydroforth/hashmap.c (new file, +66)
@@ -0,0 +1,66 @@
#include <stdlib.h>

#include "hydroforth/hydroforth.h"

static inline size_t hashvalue(const hf__hash_t hash, const size_t size) {
  return hash % size;
}

void hf__hashmap__insert(struct hf__hashmap *const hashmap,
                         const hf__hash_t hash, void *value) {
  struct hf__hashmap__node **const node =
      hashmap->arr + hashvalue(hash, hashmap->cap);

  if (*node == NULL) {
    *node = malloc(sizeof(struct hf__hashmap__node));
    (**node) = (struct hf__hashmap__node){
        .hash = hash,
        .value = value,
        .next = NULL,
    };
  } else {
    struct hf__hashmap__node *new_node = *node;
    while (new_node->next) {
      new_node = new_node->next;
    }

    new_node->next = malloc(sizeof(struct hf__hashmap__node));
    *new_node->next = (struct hf__hashmap__node){
        .hash = hash,
        .value = value,
        .next = NULL,
    };
  }
}

void **hf__hashmap__get(const struct hf__hashmap *const hashmap,
                        const hf__hash_t hash) {
  struct hf__hashmap__node *node = hashmap->arr[hashvalue(hash, hashmap->cap)];
  if (node == NULL) {
    return NULL;
  }

  while (node->hash != hash) {
    node = node->next;
    if (node == NULL) {
      return NULL;
    }
  }

  return &node->value;
}

void hf__hashmap__free(struct hf__hashmap *const hashmap,
                       const hf__hashmap__free_value_t free_value) {
  for (size_t i = 0; i < hashmap->cap; i++) {
    struct hf__hashmap__node *node = hashmap->arr[i];
    while (node != NULL) {
      if (free_value) {
        free_value(node->value);
      }
      struct hf__hashmap__node *next = node->next;
      free(node);
      node = next;
    }
  }
}
@@ -1,391 +1,37 @@
#include <stdbool.h>
|
||||
#include "ansi_lib.h"
|
||||
#include <execinfo.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <malloc.h>
|
||||
|
||||
#include "hydroforth/hydroforth.h"
|
||||
|
||||
unsigned hydroforth__hash_string(const char *const key, unsigned char len)
|
||||
{
|
||||
unsigned char i = 0;
|
||||
unsigned hash = 0;
|
||||
while (i < len)
|
||||
{
|
||||
hash += key[i++];
|
||||
hash += hash << 10;
|
||||
hash ^= hash >> 6;
|
||||
}
|
||||
hash += hash << 3;
|
||||
hash ^= hash >> 11;
|
||||
hash += hash << 15;
|
||||
bool hf__is_space_like(const char c) { return c == ' ' || c == '\t'; }
|
||||
|
||||
return hash;
|
||||
bool hf__is_numeric(const char c) { return c >= '0' && c <= '9'; }
|
||||
|
||||
char *hf__quote(const char *const str, const bool double_quote) {
|
||||
const char *const q = double_quote ? "\"" : "'";
|
||||
const size_t required_size = snprintf(NULL, 0, "%s%s%s", q, str, q);
|
||||
char *s = malloc(sizeof(char) * (required_size + 1));
|
||||
snprintf(s, required_size + 1, "%s%s%s", q, str, q);
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
inline bool hydroforth__is_space(char c)
|
||||
{
|
||||
return c == ' ' || c == '\t';
|
||||
}
|
||||
|
||||
HYDROFORTH__SCAN_NEXT_WORD_RESULT hydroforth__scan_next_word(HYDROFORTH__INTERPRETER *interpreter)
|
||||
{
|
||||
const unsigned long start = interpreter->pos;
|
||||
unsigned char len = 0;
|
||||
while (!hydroforth__is_space(interpreter->src[interpreter->pos]) && interpreter->src[interpreter->pos] != '\n' && interpreter->src[interpreter->pos])
|
||||
{
|
||||
len++;
|
||||
interpreter->pos++;
|
||||
}
|
||||
|
||||
return (HYDROFORTH__SCAN_NEXT_WORD_RESULT){
|
||||
.start = start,
|
||||
.len = len,
|
||||
};
|
||||
}
|
||||
|
||||
void hydroforth__add_word_to_word_definition(HYDROFORTH__WORD_DEFINITION *word_def, HYDROFORTH__WORD word)
|
||||
{
|
||||
word_def->words = realloc(word_def->words, sizeof(HYDROFORTH__WORD) * (word_def->words_len + 1));
|
||||
word_def->words[word_def->words_len++] = word;
|
||||
}
|
||||
|
||||
void hydroforth__parse(HYDROFORTH__RESULT *result, HYDROFORTH__INTERPRETER *interpreter, HYDROFORTH__WORD_DEFINITION *word_def)
|
||||
{
|
||||
const HYDROFORTH__SCAN_NEXT_WORD_RESULT res = hydroforth__scan_next_word(interpreter);
|
||||
if (res.len == 1)
|
||||
{
|
||||
if (hydroforth__number__is_digit(interpreter->src[res.start]))
|
||||
{
|
||||
int n = interpreter->src[res.start] - '0';
|
||||
if (word_def == NULL)
|
||||
{
|
||||
interpreter->call_stack[interpreter->call_stack_len++] = (HYDROFORTH__WORD){
|
||||
.type = PUSH,
|
||||
.data = {.number = n},
|
||||
};
|
||||
}
|
||||
else
|
||||
{
|
||||
hydroforth__add_word_to_word_definition(word_def, (HYDROFORTH__WORD){
|
||||
.type = PUSH,
|
||||
.data = {.number = n},
|
||||
});
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
switch (interpreter->src[res.start])
|
||||
{
|
||||
case '\\':
|
||||
do
|
||||
{
|
||||
interpreter->pos++;
|
||||
} while (interpreter->src[interpreter->pos] != '\n' && interpreter->src[interpreter->pos]);
|
||||
break;
|
||||
|
||||
case '(':
|
||||
while (true)
|
||||
{
|
||||
interpreter->pos++;
|
||||
if (interpreter->src[interpreter->pos] == ')' &&
|
||||
(hydroforth__is_space(interpreter->src[interpreter->pos + 1]) ||
|
||||
interpreter->src[interpreter->pos + 1] == '\n' ||
|
||||
interpreter->src[interpreter->pos + 1] == '\0') ||
|
||||
interpreter->src[interpreter->pos] == '\n' ||
|
||||
interpreter->src[interpreter->pos] == '\0')
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
case ':':
|
||||
{
|
||||
if (word_def != NULL)
|
||||
{
|
||||
hydroforth__set_func_result(result, ERR_WORD_DEF_INSIDE_WORD_DEF);
|
||||
return;
|
||||
}
|
||||
|
||||
printf("WORD DEFINITION\n");
|
||||
do
|
||||
{
|
||||
interpreter->pos++;
|
||||
} while (hydroforth__is_space(interpreter->src[interpreter->pos]) || interpreter->src[interpreter->pos] == '\n');
|
||||
if (!interpreter->src[interpreter->pos])
|
||||
{
|
||||
hydroforth__set_func_result(result, ERR_UNTERMINATED_WORD_DEFINITION);
|
||||
return;
|
||||
}
|
||||
if (hydroforth__number__is_digit(interpreter->src[interpreter->pos]))
|
||||
{
|
||||
hydroforth__set_func_result(result, ERR_WORD_NAME_CANT_BE_NUMBER);
|
||||
return;
|
||||
}
|
||||
const HYDROFORTH__SCAN_NEXT_WORD_RESULT name_scan_res = hydroforth__scan_next_word(interpreter);
|
||||
for (unsigned char i = 0; i < name_scan_res.len; i++)
|
||||
{
|
||||
putchar(interpreter->src[name_scan_res.start + i]);
|
||||
}
|
||||
putchar('\n');
|
||||
|
||||
if (name_scan_res.len == 1)
|
||||
{
|
||||
}
|
||||
else
|
||||
{
|
||||
unsigned hash = hydroforth__hash_string(interpreter->src + name_scan_res.start, name_scan_res.len);
|
||||
printf("HASH: 0x%x\n", hash);
|
||||
interpreter->word_definitions[interpreter->word_definitions_len] = (HYDROFORTH__WORD_DEFINITION){
|
||||
.words = malloc(0),
|
||||
.words_len = 0,
|
||||
};
|
||||
interpreter->word_keys[interpreter->word_keys_len++] = (HYDROFORTH__WORD_DEFINITION_WORD_KEY){
|
||||
.hash = hash,
|
||||
.key = {.word_definition_index = interpreter->word_definitions_len++},
|
||||
};
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
if (word_def == NULL)
|
||||
{
|
||||
interpreter->call_stack[interpreter->call_stack_len++] = (HYDROFORTH__WORD){
|
||||
.type = CHAR_WORD,
|
||||
.data = {.char_word = interpreter->src[res.start]},
|
||||
};
|
||||
}
|
||||
else
|
||||
{
|
||||
hydroforth__add_word_to_word_definition(word_def, (HYDROFORTH__WORD){
|
||||
.type = CHAR_WORD,
|
||||
.data = {.char_word = interpreter->src[res.start]},
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
const bool next_is_digit = hydroforth__number__is_digit(interpreter->src[res.start + 1]);
|
||||
if (next_is_digit && interpreter->src[res.start] == '-')
|
||||
{
|
||||
int n = hydroforth__number__parse_number(result, interpreter->src + res.start + 1, res.len - 1);
|
||||
if (result->error)
|
||||
{
|
||||
hydroforth__add_func_backtrace(result);
|
||||
return;
|
||||
}
|
||||
if (word_def == NULL)
|
||||
{
|
||||
interpreter->call_stack[interpreter->call_stack_len++] = (HYDROFORTH__WORD){
|
||||
.type = PUSH,
|
||||
.data = {.number = -n},
|
||||
};
|
||||
}
|
||||
else
|
||||
{
|
||||
hydroforth__add_word_to_word_definition(word_def, (HYDROFORTH__WORD){
|
||||
.type = PUSH,
|
||||
.data = {.number = -n},
|
||||
});
|
||||
}
|
||||
}
|
||||
else if (next_is_digit && interpreter->src[res.start] == '+')
|
||||
{
|
||||
int n = hydroforth__number__parse_number(result, interpreter->src + res.start + 1, res.len - 1);
|
||||
if (result->error)
|
||||
{
|
||||
hydroforth__add_func_backtrace(result);
|
||||
return;
|
||||
}
|
||||
if (word_def == NULL)
|
||||
{
|
||||
interpreter->call_stack[interpreter->call_stack_len++] = (HYDROFORTH__WORD){
|
||||
.type = PUSH,
|
||||
.data = {.number = n},
|
||||
};
|
||||
}
|
||||
else
|
||||
{
|
||||
hydroforth__add_word_to_word_definition(word_def, (HYDROFORTH__WORD){
|
||||
.type = PUSH,
|
||||
.data = {.number = n},
|
||||
});
|
||||
}
|
||||
}
|
||||
else if (hydroforth__number__is_digit(interpreter->src[res.start]))
|
||||
{
|
||||
int n = hydroforth__number__parse_number(result, interpreter->src + res.start, res.len);
|
||||
if (result->error)
|
||||
{
|
||||
hydroforth__add_func_backtrace(result);
|
||||
return;
|
||||
}
|
||||
if (word_def == NULL)
|
||||
{
|
||||
interpreter->call_stack[interpreter->call_stack_len++] = (HYDROFORTH__WORD){
|
||||
.type = PUSH,
|
||||
.data = {.number = n},
|
||||
};
|
||||
}
|
||||
else
|
||||
{
|
||||
hydroforth__add_word_to_word_definition(word_def, (HYDROFORTH__WORD){
|
||||
.type = PUSH,
|
||||
.data = {.number = n},
|
||||
});
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
const unsigned hash = hydroforth__hash_string(interpreter->src + res.start, res.len);
|
||||
switch (hash)
|
||||
{
|
||||
case 0x4078cde9: // --
|
||||
do
|
||||
{
|
||||
interpreter->pos++;
|
||||
} while (interpreter->src[interpreter->pos] != '\n' && interpreter->src[interpreter->pos]);
|
||||
break;
|
||||
|
||||
default:
|
||||
if (word_def == NULL)
|
||||
{
|
||||
interpreter->call_stack[interpreter->call_stack_len++] = (HYDROFORTH__WORD){
|
||||
.type = WORD,
|
||||
.data = {.hash = hash},
|
||||
};
|
||||
}
|
||||
else
|
||||
{
|
||||
hydroforth__add_word_to_word_definition(word_def, (HYDROFORTH__WORD){
|
||||
.type = WORD,
|
||||
.data = {.hash = hash},
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void hydroforth__run_call_stack(HYDROFORTH__RESULT *result, HYDROFORTH__INTERPRETER *interpreter)
|
||||
{
|
||||
for (unsigned char i = interpreter->call_stack_len; i > 0; interpreter->call_stack_len--, i--)
|
||||
{
|
||||
const HYDROFORTH__WORD *word = interpreter->call_stack + i - 1;
|
||||
switch (word->type)
|
||||
{
|
||||
case PUSH:
|
||||
interpreter->stack[interpreter->stack_len++] = word->data.number;
|
||||
break;
|
||||
|
||||
case CHAR_WORD:
|
||||
switch (word->data.char_word)
|
||||
{
|
||||
case '-':
|
||||
interpreter->stack[interpreter->stack_len - 2] -= interpreter->stack[interpreter->stack_len - 1];
|
||||
interpreter->stack_len--;
|
||||
break;
|
||||
|
||||
case '+':
|
||||
interpreter->stack[interpreter->stack_len - 2] += interpreter->stack[interpreter->stack_len - 1];
|
||||
interpreter->stack_len--;
|
||||
break;
|
||||
|
||||
case '*':
|
||||
interpreter->stack[interpreter->stack_len - 2] *= interpreter->stack[interpreter->stack_len - 1];
|
||||
interpreter->stack_len--;
|
||||
break;
|
||||
|
||||
case '/':
|
||||
interpreter->stack[interpreter->stack_len - 2] /= interpreter->stack[interpreter->stack_len - 1];
|
||||
interpreter->stack_len--;
|
||||
break;
|
||||
|
||||
default:
|
||||
hydroforth__set_func_result(result, ERR_UNKNOWN_SINGLE_CHAR_WORD);
|
||||
return;
|
||||
}
|
||||
break;
|
||||
|
||||
case WORD:
|
||||
printf("HASH: 0x%x\n", word->data.hash);
|
||||
switch (word->data.hash)
|
||||
{
|
||||
case 0xaddd94c: // debug
|
||||
const unsigned char left_width = hydroforth__number__count_digits(interpreter->stack_len);
|
||||
unsigned char right_width = 0;
|
||||
for (unsigned char i = 0; i < interpreter->stack_len; i++)
|
||||
{
|
||||
const unsigned char tmp = hydroforth__number__count_digits(interpreter->stack[i]);
|
||||
if (tmp > right_width)
|
||||
{
|
||||
right_width = tmp;
|
||||
}
|
||||
}
|
||||
for (unsigned char i = 0; i < (2 + left_width + 3 + right_width + 2); i++)
|
||||
{
|
||||
putchar('-');
|
||||
}
|
||||
putchar('\n');
|
||||
for (unsigned char i = interpreter->stack_len - 1; i != 0xff; i--)
|
||||
{
|
||||
printf("| ");
|
||||
const unsigned char left_index_width = left_width - hydroforth__number__count_digits(i);
|
||||
for (unsigned char j = 0; j < left_index_width; j++)
|
||||
{
|
||||
putchar(' ');
|
||||
}
|
||||
printf("%u | ", i);
|
||||
const unsigned char right_index_width = right_width - hydroforth__number__count_digits(interpreter->stack[i]);
|
||||
for (unsigned char j = 0; j < right_index_width; j++)
|
||||
{
|
||||
putchar(' ');
|
||||
}
|
||||
printf("%i |\n", interpreter->stack[i]);
|
||||
}
|
||||
for (unsigned char i = 0; i < (2 + left_width + 3 + right_width + 2); i++)
|
||||
{
|
||||
putchar('-');
|
||||
}
|
||||
putchar('\n');
|
||||
break;
|
||||
|
||||
default:
|
||||
hydroforth__set_func_result(result, ERR_UNKNOWN_WORD);
|
||||
return;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void hydroforth__run(HYDROFORTH__RESULT *result, HYDROFORTH__INTERPRETER *interpreter)
|
||||
{
|
||||
while (true)
|
||||
{
|
||||
while (hydroforth__is_space(interpreter->src[interpreter->pos]) || interpreter->src[interpreter->pos] == '\n')
|
||||
{
|
||||
interpreter->pos++;
|
||||
}
|
||||
if (!interpreter->src[interpreter->pos])
|
||||
{
|
||||
return;
|
||||
}
|
||||
hydroforth__parse(result, interpreter, NULL);
|
||||
if (result->error)
|
||||
{
|
||||
hydroforth__add_func_backtrace(result);
|
||||
return;
|
||||
}
|
||||
hydroforth__run_call_stack(result, interpreter);
|
||||
if (result->error)
|
||||
{
|
||||
hydroforth__add_func_backtrace(result);
|
||||
return;
|
||||
}
|
||||
}
|
||||
char *hf__quote_mem_str(const char *const src, const size_t start,
|
||||
const size_t end, const bool double_quote) {
|
||||
const size_t str_len = end - start + 1;
|
||||
char *str = malloc(sizeof(char) * (str_len + 1));
|
||||
strncpy(str, src + start, str_len);
|
||||
str[str_len] = '\0';
|
||||
|
||||
const char *const q = double_quote ? "\"" : "'";
|
||||
const size_t required_size = snprintf(NULL, 0, "%s%s%s", q, str, q);
|
||||
char *s = malloc(sizeof(char) * (required_size + 1));
|
||||
snprintf(s, required_size + 1, "%s%s%s", q, str, q);
|
||||
|
||||
free(str);
|
||||
|
||||
return s;
|
||||
}
|
||||
|
|
src/hydroforth/interpreter.c (new file, +364)
@@ -0,0 +1,364 @@
|
|||
#include "ansi_lib.h"
|
||||
#include <stdbool.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#include "hydroforth/hydroforth.h"
|
||||
|
||||
void free_word_def_value(void *x) {
|
||||
struct hf__interpreter__word *word = x;
|
||||
free(word->body);
|
||||
free(word);
|
||||
}
|
||||
|
||||
const hf__hashmap__free_value_t hf__interpreter__word_free =
|
||||
free_word_def_value;
|
||||
|
||||
void hf__interpreter__free(struct hf__interpreter *interpreter) {
|
||||
free(interpreter->call_stack);
|
||||
hf__hashmap__free(&interpreter->words, hf__interpreter__word_free);
|
||||
free(interpreter->stack);
|
||||
}
|
||||
|
||||
struct hf__result words__number(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
hf__interpreter__stack_push(&interpreter->stack, &interpreter->stack_len,
|
||||
&interpreter->stack_size, node->value.number);
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__char(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
hf__interpreter__stack_push(&interpreter->stack, &interpreter->stack_len,
|
||||
&interpreter->stack_size, node->value.ch);
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__word(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
const struct hf__interpreter__word *const *const word =
|
||||
(const struct hf__interpreter__word *const *const)hf__hashmap__get(
|
||||
&interpreter->words, node->value.word.hash);
|
||||
if (word == NULL) {
|
||||
char *msg = hf__quote(node->value.word.value, true);
|
||||
free(node->value.word.value);
|
||||
|
||||
return HF__ERR_CUSTOM(HF__ERROR__INTERPRETER__UNKNOWN_WORD, msg, true);
|
||||
}
|
||||
|
||||
for (size_t i = (*word)->body_len - 1; i != 0 - 1; i--) {
|
||||
hf__parser__node_array_push(
|
||||
&interpreter->call_stack, &interpreter->call_stack_len,
|
||||
&interpreter->call_stack_size, (*word)->body[i]);
|
||||
}
|
||||
|
||||
free(node->value.word.value);
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__word_def(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
if (hf__hashmap__get(&interpreter->words, node->value.word_def->name.hash)) {
|
||||
char *msg = hf__quote(node->value.word_def->name.value, true);
|
||||
|
||||
free(node->value.word_def->name.value);
|
||||
free(node->value.word_def->body);
|
||||
free(node->value.word_def);
|
||||
|
||||
return HF__ERR_CUSTOM(HF__ERROR__INTERPRETER__WORD_ALREADY_DEF, msg, true);
|
||||
}
|
||||
|
||||
struct hf__interpreter__word *word =
|
||||
malloc(sizeof(struct hf__interpreter__word));
|
||||
(*word) = (struct hf__interpreter__word){
|
||||
.body = node->value.word_def->body,
|
||||
.body_len = node->value.word_def->body_len,
|
||||
};
|
||||
|
||||
hf__hashmap__insert(&interpreter->words, node->value.word_def->name.hash,
|
||||
word);
|
||||
|
||||
free(node->value.word_def->name.value);
|
||||
free(node->value.word_def);
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__comment(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
// printf("comment = \"%s\"\n", node->value.comment);
|
||||
|
||||
free(node->value.comment);
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__dup(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
if (interpreter->stack_len < 1) {
|
||||
return (struct hf__result){
|
||||
.ok = false,
|
||||
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
||||
};
|
||||
}
|
||||
|
||||
const long top = interpreter->stack[interpreter->stack_len - 1];
|
||||
hf__interpreter__stack_push(&interpreter->stack, &interpreter->stack_len,
|
||||
&interpreter->stack_size, top);
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__drop(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
if (interpreter->stack_len < 1) {
|
||||
return (struct hf__result){
|
||||
.ok = false,
|
||||
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
||||
};
|
||||
}
|
||||
|
||||
interpreter->stack_len--;
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__swap(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
if (interpreter->stack_len < 2) {
|
||||
return (struct hf__result){
|
||||
.ok = false,
|
||||
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
||||
};
|
||||
}
|
||||
|
||||
const long tmp = interpreter->stack[interpreter->stack_len - 1];
|
||||
interpreter->stack[interpreter->stack_len - 1] =
|
||||
interpreter->stack[interpreter->stack_len - 2];
|
||||
interpreter->stack[interpreter->stack_len - 2] = tmp;
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__over(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
if (interpreter->stack_len < 2) {
|
||||
return (struct hf__result){
|
||||
.ok = false,
|
||||
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
||||
};
|
||||
}
|
||||
|
||||
hf__interpreter__stack_push(&interpreter->stack, &interpreter->stack_len,
|
||||
&interpreter->stack_size,
|
||||
interpreter->stack[interpreter->stack_len - 2]);
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__rot(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
if (interpreter->stack_len < 1) {
|
||||
return (struct hf__result){
|
||||
.ok = false,
|
||||
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
||||
};
|
||||
}
|
||||
|
||||
interpreter->stack_len--;
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__add(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
if (interpreter->stack_len < 2) {
|
||||
return (struct hf__result){
|
||||
.ok = false,
|
||||
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
||||
};
|
||||
}
|
||||
|
||||
interpreter->stack[interpreter->stack_len - 2] +=
|
||||
interpreter->stack[interpreter->stack_len - 1];
|
||||
interpreter->stack_len--;
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__sub(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
if (interpreter->stack_len < 2) {
|
||||
return (struct hf__result){
|
||||
.ok = false,
|
||||
.error = HF__ERROR__INTERPRETER__STACK_UNDERFLOW,
|
||||
};
|
||||
}
|
||||
|
||||
interpreter->stack[interpreter->stack_len - 2] -=
|
||||
interpreter->stack[interpreter->stack_len - 1];
|
||||
interpreter->stack_len--;
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__dot(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
if (interpreter->stack_len < 1) {
|
||||
return HF__ERR(HF__ERROR__INTERPRETER__STACK_UNDERFLOW);
|
||||
}
|
||||
|
||||
printf("%li", interpreter->stack[--interpreter->stack_len]);
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__emit(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
if (interpreter->stack_len < 1) {
|
||||
return HF__ERR(HF__ERROR__INTERPRETER__STACK_UNDERFLOW);
|
||||
}
|
||||
|
||||
putchar(interpreter->stack[--interpreter->stack_len]);
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__space(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
putchar(' ');
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__spaces(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
if (interpreter->stack_len < 1) {
|
||||
return HF__ERR(HF__ERROR__INTERPRETER__STACK_UNDERFLOW);
|
||||
}
|
||||
|
||||
const unsigned long max = interpreter->stack[--interpreter->stack_len];
|
||||
for (unsigned long i = 0; i < max; i++) {
|
||||
putchar(' ');
|
||||
}
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__cr(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
putchar('\n');
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__crs(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
if (interpreter->stack_len < 1) {
|
||||
return HF__ERR(HF__ERROR__INTERPRETER__STACK_UNDERFLOW);
|
||||
}
|
||||
|
||||
const unsigned long max = interpreter->stack[--interpreter->stack_len];
|
||||
for (unsigned long i = 0; i < max; i++) {
|
||||
putchar('\n');
|
||||
}
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__debug(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
SET_8_VALUE_COLOUR(TXT_CYAN);
|
||||
puts("\n===\nDEBUG:");
|
||||
for (size_t i = interpreter->stack_len - 1; i != 0 - 1; i--) {
|
||||
printf("%lu : %li\n", i, interpreter->stack[i]);
|
||||
}
|
||||
puts("===");
|
||||
SET_8_VALUE_COLOUR(TXT_DEFAULT);
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__exit(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
if (interpreter->stack_len < 1) {
|
||||
return HF__ERR(HF__ERROR__INTERPRETER__STACK_UNDERFLOW);
|
||||
}
|
||||
|
||||
interpreter->is_running = false;
|
||||
interpreter->exit_code = interpreter->stack[--interpreter->stack_len];
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
struct hf__result words__abort(struct hf__interpreter *const interpreter,
|
||||
const struct hf__node *const node) {
|
||||
abort();
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
const hf__interpreter__word_func_t
|
||||
HF__INTERPRETER__WORD_FUNCTION[__HF__NODE_TYPE__N] = {
|
||||
[HF__NODE_TYPE__NUMBER] = words__number,
|
||||
[HF__NODE_TYPE__CHAR] = words__char,
|
||||
[HF__NODE_TYPE__WORD] = words__word,
|
||||
[HF__NODE_TYPE__WORD_DEF] = words__word_def,
|
||||
|
||||
[HF__NODE_TYPE__DASH_COMMENT] = words__comment,
|
||||
[HF__NODE_TYPE__PAREN_COMMENT] = words__comment,
|
||||
|
||||
[HF__NODE_TYPE__DUP] = words__dup,
|
||||
[HF__NODE_TYPE__DROP] = words__drop,
|
||||
[HF__NODE_TYPE__SWAP] = words__swap,
|
||||
[HF__NODE_TYPE__OVER] = words__over,
|
||||
[HF__NODE_TYPE__ROT] = words__rot,
|
||||
|
||||
[HF__NODE_TYPE__ADD] = words__add,
|
||||
[HF__NODE_TYPE__SUB] = words__sub,
|
||||
|
||||
[HF__NODE_TYPE__DOT] = words__dot,
|
||||
[HF__NODE_TYPE__EMIT] = words__emit,
|
||||
[HF__NODE_TYPE__SPACE] = words__space,
|
||||
[HF__NODE_TYPE__SPACES] = words__spaces,
|
||||
[HF__NODE_TYPE__CR] = words__cr,
|
||||
[HF__NODE_TYPE__CRS] = words__crs,
|
||||
[HF__NODE_TYPE__DEBUG] = words__debug,
|
||||
|
||||
[HF__NODE_TYPE__ABORT] = words__abort,
|
||||
[HF__NODE_TYPE__EXIT] = words__exit,
|
||||
};
|
||||
|
||||
void hf__interpreter__stack_push(long **arr, size_t *const len,
|
||||
size_t *const size, long item) {
|
||||
if (*len > *size) {
|
||||
return;
|
||||
} else if (*len == *size) {
|
||||
*size += 1 + (*size / 2);
|
||||
*arr = realloc(*arr, sizeof(long) * (*size));
|
||||
}
|
||||
|
||||
(*arr)[*len] = item;
|
||||
(*len)++;
|
||||
}
|
||||
|
||||
struct hf__result
|
||||
hf__interpreter__run(struct hf__interpreter *const interpreter) {
|
||||
if (interpreter->call_stack_len == 0) {
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
const struct hf__node *const top =
|
||||
interpreter->call_stack + --interpreter->call_stack_len;
|
||||
const hf__interpreter__word_func_t func =
|
||||
HF__INTERPRETER__WORD_FUNCTION[top->type];
|
||||
|
||||
if (func) {
|
||||
return func(interpreter, top);
|
||||
} else {
|
||||
return HF__OK;
|
||||
}
|
||||
}
|
src/hydroforth/lexer.c (new file, +120)
@@ -0,0 +1,120 @@
#include <stdlib.h>
#include <string.h>

#include "hydroforth/hydroforth.h"

void token_array_push(struct hf__token **arr, size_t *const len,
                      size_t *const size, struct hf__token item) {
  if (*len > *size) {
    return;
  } else if (*len == *size) {
    *size += 1 + (*size / 2);
    *arr = realloc(*arr, sizeof(struct hf__token) * (*size));
  }

  (*arr)[*len] = item;
  (*len)++;
}

void hf__lex(const char *const src, const size_t src_len,
             struct hf__token **tokens, size_t *const len, size_t *const size) {
  size_t i = 0;
  while (i < src_len) {
    if (hf__is_space_like(src[i]) || src[i] == '\n') {
      i++;
      continue;
    }

    size_t start = i;
    struct hf__token token;

    if (src[i] == '\'') {
      const size_t char_start = start;
      i++;
      start = i;
      while (src[i] != '\'') {
        i++;
        if (i >= src_len) {
          start = char_start;
          goto TOKEN_IS_WORD;
        }
      }

      token.type = HF__TOKEN_TYPE__CHAR;
      token.location.start = start;
      token.location.end = i - 1;

      i++;
    } else {
      while (!hf__is_space_like(src[i]) && src[i] != '\n' && i < src_len) {
        i++;
      }
      const size_t str_len = i - start;

      if (hf__is_numeric(src[start]) || (src[start] == '-' && str_len > 1 &&
                                         hf__is_numeric(src[start + 1]))) {
        token.type = HF__TOKEN_TYPE__NUMBER;
        token.location.start = start;
        token.location.end = i - 1;
      } else if (str_len == 1 && src[start] == ':') {
        token.type = HF__TOKEN_TYPE__COLON;
        token.location.start = start;
        token.location.end = i - 1;
      } else if (str_len == 1 && src[start] == ';') {
        token.type = HF__TOKEN_TYPE__SEMICOLON;
        token.location.start = start;
        token.location.end = i - 1;
      } else if (str_len == 1 && src[start] == '(' &&
                 hf__is_space_like(src[i])) {
        i++;
        bool got_end = false;
        while (i < src_len) {
          if (src[i] == ')' && hf__is_space_like(src[i - 1])) {
            got_end = true;
            break;
          }
          i++;
        }

        if (got_end) {
          token.type = HF__TOKEN_TYPE__PAREN_COMMENT;
          token.location.start = start + 2;
          token.location.end = i - 2;
          i++;
        } else {
          i = start + 1;
          goto TOKEN_IS_WORD;
        }
      } else if (str_len == 1 && src[start] == '\\' &&
                 hf__is_space_like(src[i])) {
        token.type = HF__TOKEN_TYPE__BACKSLASH_COMMENT;

        start = ++i;
        while (src[i] != '\n' && i < src_len) {
          i++;
        }

        token.location.start = start;
        token.location.end = i - 1;
      } else if (str_len == 2 && strncmp(src + start, "--", 2) == 0 &&
                 (hf__is_space_like(src[i]) || src[i] == '\0')) {
        token.type = HF__TOKEN_TYPE__DASH_COMMENT;

        start = ++i;
        while (src[i] != '\n' && i < src_len) {
          i++;
        }

        token.location.start = start;
        token.location.end = i - 1;
      } else {
      TOKEN_IS_WORD:
        token.type = HF__TOKEN_TYPE__WORD;
        token.location.start = start;
        token.location.end = i - 1;
      }
    }

    token_array_push(tokens, len, size, token);
  }
}
@@ -1,135 +0,0 @@
#include <stdbool.h>
|
||||
#include <stddef.h>
|
||||
|
||||
#include "hydroforth/hydroforth.h"
|
||||
|
||||
bool hydroforth__number__is_digit(char c)
|
||||
{
|
||||
return '0' <= c && c <= '9';
|
||||
}
|
||||
|
||||
unsigned char hydroforth__number__convert_hex_digit(HYDROFORTH__RESULT *const result, char c)
|
||||
{
|
||||
if (hydroforth__number__is_digit(c))
|
||||
{
|
||||
return c - '0';
|
||||
}
|
||||
else
|
||||
{
|
||||
switch (c)
|
||||
{
|
||||
case 'A':
|
||||
case 'a':
|
||||
return 0xa;
|
||||
|
||||
case 'B':
|
||||
case 'b':
|
||||
return 0xb;
|
||||
|
||||
case 'C':
|
||||
case 'c':
|
||||
return 0xc;
|
||||
|
||||
case 'D':
|
||||
case 'd':
|
||||
return 0xd;
|
||||
|
||||
case 'E':
|
||||
case 'e':
|
||||
return 0xe;
|
||||
|
||||
case 'F':
|
||||
case 'f':
|
||||
return 0xf;
|
||||
|
||||
default:
|
||||
// hydroforth__result__set(result, ERR_INVALID_HEX_CHAR, __func__);
|
||||
hydroforth__set_func_result(result, ERR_INVALID_HEX_CHAR);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
int hydroforth__number__parse_number_hex(HYDROFORTH__RESULT *const result, const char *const start, unsigned char len)
|
||||
{
|
||||
int n = 0;
|
||||
for (unsigned char i = 0; i < len; i++)
|
||||
{
|
||||
unsigned char m = hydroforth__number__convert_hex_digit(result, start[i]);
|
||||
if (result->error)
|
||||
{
|
||||
hydroforth__add_func_backtrace(result);
|
||||
return 0;
|
||||
}
|
||||
n *= 16;
|
||||
n += m;
|
||||
}
|
||||
|
||||
return n;
|
||||
}
|
||||
|
||||
int hydroforth__number__parse_number(HYDROFORTH__RESULT *const result, const char *const start, unsigned char len)
|
||||
{
|
||||
if (start[0] == '0')
|
||||
{
|
||||
if (len > 1)
|
||||
{
|
||||
switch (start[1])
|
||||
{
|
||||
case 'X':
|
||||
case 'x':
|
||||
{
|
||||
int n = hydroforth__number__parse_number_hex(result, start + 2, len - 2);
|
||||
if (result->error)
|
||||
{
|
||||
hydroforth__add_func_backtrace(result);
|
||||
}
|
||||
return n;
|
||||
}
|
||||
|
||||
default:
|
||||
{
|
||||
int n = hydroforth__number__parse_number(result, start + 1, len - 1);
|
||||
if (result->error)
|
||||
{
|
||||
hydroforth__add_func_backtrace(result);
|
||||
}
|
||||
return n;
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
int n = start[0] - '0';
|
||||
for (unsigned char i = 1; i < len; i++)
|
||||
{
|
||||
n *= 10;
|
||||
n += start[i] - '0';
|
||||
}
|
||||
return n;
|
||||
}
|
||||
}
|
||||
|
||||
unsigned char hydroforth__number__count_digits(int n)
|
||||
{
|
||||
if (n == 0)
|
||||
{
|
||||
return 1;
|
||||
}
|
||||
else
|
||||
{
|
||||
unsigned char res = 0;
|
||||
while (n != 0)
|
||||
{
|
||||
n /= 10;
|
||||
res++;
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
}
|
src/hydroforth/parser.c (new file, +361)
@@ -0,0 +1,361 @@
|
|||
#include <ctype.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
#include "hydroforth/hydroforth.h"
|
||||
|
||||
void hf__parser__init_keyword_map(struct hf__hashmap *const map, size_t cap) {
|
||||
if (cap == 0) {
|
||||
cap = HF__PARSER__KEYWORD_MAP_CAP;
|
||||
}
|
||||
*map = (struct hf__hashmap){
|
||||
.arr = calloc(cap, sizeof(struct hf__hashmap__node *)),
|
||||
.cap = cap,
|
||||
};
|
||||
|
||||
for (enum HF__KEYWORD k = 0; k < __HF__KEYWORD__N; k++) {
|
||||
hf__hashmap__insert(map, hf__hash_str(HF__KEYWORD_STR[k]),
|
||||
(void *)&HF__KEYWORD_NODE_TYPE[k]);
|
||||
}
|
||||
}
|
||||
|
||||
void hf__parser__node_array_push(struct hf__node **arr, size_t *const len,
|
||||
size_t *const size, struct hf__node item) {
|
||||
if (*len > *size) {
|
||||
return;
|
||||
} else if (*len == *size) {
|
||||
*size += 1 + (*size / 2);
|
||||
*arr = realloc(*arr, sizeof(struct hf__node) * (*size));
|
||||
}
|
||||
|
||||
(*arr)[*len] = item;
|
||||
(*len)++;
|
||||
}
|
||||
|
||||
char *strip_whitespaces(const char *const str, size_t start, size_t end) {
|
||||
while (hf__is_space_like(str[start])) {
|
||||
start++;
|
||||
}
|
||||
while (hf__is_space_like(str[end])) {
|
||||
end--;
|
||||
}
|
||||
const size_t raw_len = end - start + 1;
|
||||
char *stripped = malloc(sizeof(char) * raw_len);
|
||||
strncpy(stripped, str + start, raw_len);
|
||||
stripped[raw_len] = '\0';
|
||||
|
||||
return stripped;
|
||||
}
|
||||
|
||||
struct hf__result hf__parse(struct hf__parser *const parser,
|
||||
const char *const src,
|
||||
const struct hf__token *const tokens,
|
||||
const size_t tokens_len, struct hf__node **nodes,
|
||||
size_t *const len, size_t *const size) {
|
||||
if (!parser->keyword_map_is_init) {
|
||||
hf__parser__init_keyword_map(&parser->keyword_map, 0);
|
||||
parser->keyword_map_is_init = true;
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < tokens_len; i++) {
|
||||
switch (tokens[i].type) {
|
||||
case HF__TOKEN_TYPE__NUMBER: {
|
||||
size_t j = tokens[i].location.start;
|
||||
bool negative = false;
|
||||
long number = 0;
|
||||
|
||||
if (src[tokens[i].location.start] == '-') {
|
||||
j++;
|
||||
negative = true;
|
||||
} else if (src[tokens[i].location.start] == '+') {
|
||||
j++;
|
||||
}
|
||||
|
||||
if (src[j] == '0') {
|
||||
j++;
|
||||
if (j < (tokens[i].location.end + 1)) {
|
||||
switch (src[j]) {
|
||||
case 'B':
|
||||
case 'b':
|
||||
j++;
|
||||
for (; j < (tokens[i].location.end + 1); j++) {
|
||||
if (src[j] != '0' && src[j] != '1') {
|
||||
return HF__ERR_CUSTOM(
|
||||
HF__ERROR__PARSER__INVALID_NUMBER,
|
||||
hf__quote_mem_str(src, tokens[i].location.start,
|
||||
tokens[i].location.end, true),
|
||||
true);
|
||||
}
|
||||
|
||||
number *= 2;
|
||||
number += src[j] - '0';
|
||||
}
|
||||
break;
|
||||
|
||||
case 'X':
|
||||
case 'x':
|
||||
j++;
|
||||
for (; j < (tokens[i].location.end + 1); j++) {
|
||||
const bool is_alphabetical_high_case =
|
||||
src[j] >= 'A' && src[j] <= 'F';
|
||||
const bool is_alphabetical =
|
||||
is_alphabetical_high_case || (src[j] >= 'a' && src[j] <= 'f');
|
||||
if (!((src[j] >= '0' && src[j] <= '9') || is_alphabetical)) {
|
||||
return HF__ERR_CUSTOM(
|
||||
HF__ERROR__PARSER__INVALID_NUMBER,
|
||||
hf__quote_mem_str(src, tokens[i].location.start,
|
||||
tokens[i].location.end, true),
|
||||
true);
|
||||
}
|
||||
|
||||
number *= 16;
|
||||
if (is_alphabetical_high_case) {
|
||||
number += 10 + src[j] - 'A';
|
||||
} else if (is_alphabetical) {
|
||||
number += 10 + src[j] - 'a';
|
||||
} else {
|
||||
number += src[j] - '0';
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
goto PARSER_NUMBER_DEFAULT;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
PARSER_NUMBER_DEFAULT:
|
||||
for (; j < (tokens[i].location.end + 1); j++) {
|
||||
if (src[j] < '0' || src[j] > '9') {
|
||||
return HF__ERR_CUSTOM(
|
||||
HF__ERROR__PARSER__INVALID_NUMBER,
|
||||
hf__quote_mem_str(src, tokens[i].location.start,
|
||||
tokens[i].location.end, true),
|
||||
true);
|
||||
}
|
||||
|
||||
number *= 10;
|
||||
number += src[j] - '0';
|
||||
}
|
||||
}
|
||||
|
||||
hf__parser__node_array_push(
|
||||
nodes, len, size,
|
||||
(struct hf__node){
|
||||
.type = HF__NODE_TYPE__NUMBER,
|
||||
.value = {.number = negative ? -number : number},
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
case HF__TOKEN_TYPE__CHAR: {
|
||||
const size_t char_len =
|
||||
tokens[i].location.end - tokens[i].location.start + 1;
|
||||
char ch = src[tokens[i].location.start + 1];
|
||||
|
||||
if (src[tokens[i].location.start] == '\\') {
|
||||
size_t j = 1;
|
||||
|
||||
switch (ch) {
|
||||
case 's':
|
||||
case 't':
|
||||
j++;
|
||||
ch = ' ';
|
||||
break;
|
||||
|
||||
case 'n':
|
||||
j++;
|
||||
ch = '\n';
|
||||
break;
|
||||
|
||||
default:
|
||||
j++;
|
||||
break;
|
||||
}
|
||||
|
||||
if (j != char_len) {
|
||||
return HF__ERR_CUSTOM(HF__ERROR__PARSER__INVALID_CHAR,
|
||||
hf__quote_mem_str(src, tokens[i].location.start,
|
||||
tokens[i].location.end,
|
||||
false),
|
||||
true);
|
||||
}
|
||||
} else if (char_len != 1) {
|
||||
return HF__ERR_CUSTOM(HF__ERROR__PARSER__INVALID_CHAR,
|
||||
hf__quote_mem_str(src, tokens[i].location.start,
|
||||
tokens[i].location.end, false),
|
||||
true);
|
||||
} else {
|
||||
ch = src[tokens[i].location.start];
|
||||
}
|
||||
|
||||
hf__parser__node_array_push(nodes, len, size,
|
||||
(struct hf__node){
|
||||
.type = HF__NODE_TYPE__CHAR,
|
||||
.value = {.ch = ch},
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
case HF__TOKEN_TYPE__WORD: {
|
||||
struct hf__node node;
|
||||
|
||||
const size_t diff = tokens[i].location.end - tokens[i].location.start;
|
||||
const size_t word_len = diff + 1;
|
||||
char *lower_s = malloc(sizeof(char) * word_len);
|
||||
const char *const word_start = src + tokens[i].location.start;
|
||||
for (size_t j = 0; j < word_len; j++) {
|
||||
lower_s[j] = tolower(word_start[j]);
|
||||
}
|
||||
lower_s[word_len] = '\0';
|
||||
const hf__hash_t hash = hf__hash_str(lower_s);
|
||||
|
||||
const enum hf__node_type *const *const type_ptr =
|
||||
(const enum hf__node_type *const *const)hf__hashmap__get(
|
||||
&parser->keyword_map, hash);
|
||||
if (type_ptr == NULL) {
|
||||
node = (struct hf__node){
|
||||
.type = HF__NODE_TYPE__WORD,
|
||||
.value = {.word = {.hash = hash, .value = lower_s}},
|
||||
};
|
||||
} else {
|
||||
node.type = **type_ptr;
|
||||
free(lower_s);
|
||||
}
|
||||
|
||||
hf__parser__node_array_push(nodes, len, size, node);
|
||||
break;
|
||||
}
|
||||
|
||||
case HF__TOKEN_TYPE__COLON: {
|
||||
const size_t start = i++;
|
||||
bool got_end = false;
|
||||
size_t end;
|
||||
unsigned char depth = 1;
|
||||
for (; i < tokens_len; i++) {
|
||||
switch (tokens[i].type) {
|
||||
case HF__TOKEN_TYPE__COLON:
|
||||
depth++;
|
||||
break;
|
||||
|
||||
case HF__TOKEN_TYPE__SEMICOLON:
|
||||
depth--;
|
||||
break;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
if (depth == 0) {
|
||||
end = i;
|
||||
got_end = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const struct hf__token *const name_tok = tokens + start + 1;
|
||||
if (!got_end || end - start <= 1) {
|
||||
return HF__ERR(HF__ERROR__PARSER__WORD_DEF_INCOMPLETE);
|
||||
} else if (name_tok->type != HF__TOKEN_TYPE__WORD) {
|
||||
return HF__ERR(HF__ERROR__PARSER__WORD_DEF_INVALID_NAME);
|
||||
}
|
||||
|
||||
const size_t name_len =
|
||||
name_tok->location.end - name_tok->location.start + 1;
|
||||
char *name = malloc(sizeof(char) * (name_len + 1));
|
||||
for (size_t j = 0; j < name_len; j++) {
|
||||
name[j] = tolower(src[name_tok->location.start + j]);
|
||||
}
|
||||
name[name_len] = '\0';
|
||||
const hf__hash_t hash = hf__hash_str(name);
|
||||
|
||||
if (hf__hashmap__get(&parser->keyword_map, hash)) {
|
||||
free(name);
|
||||
|
||||
return HF__ERR_CUSTOM(HF__ERROR__PARSER__WORD_DEF_IS_KEYWORD,
|
||||
hf__quote_mem_str(src, name_tok->location.start,
|
||||
name_tok->location.end, true),
|
||||
true);
|
||||
}
|
||||
|
||||
size_t body_len = 0;
|
||||
size_t body_size = 0;
|
||||
struct hf__node *body = NULL;
|
||||
struct hf__result parse_res =
|
||||
hf__parse(parser, src, tokens + 2, (end - start + 1) - 3, &body,
|
||||
&body_len, &body_size);
|
||||
|
||||
struct hf__node_value__word_def *word_def =
|
||||
malloc(sizeof(struct hf__node_value__word_def));
|
||||
(*word_def) = (struct hf__node_value__word_def){
|
||||
.name =
|
||||
{
|
||||
.hash = hash,
|
||||
.value = name,
|
||||
},
|
||||
.body = body,
|
||||
.body_len = body_len,
|
||||
};
|
||||
|
||||
hf__parser__node_array_push(nodes, len, size,
|
||||
(struct hf__node){
|
||||
.type = HF__NODE_TYPE__WORD_DEF,
|
||||
.value = {.word_def = word_def},
|
||||
});
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case HF__TOKEN_TYPE__SEMICOLON:
|
||||
return HF__ERR_CUSTOM(HF__ERROR__PARSER__UNEXPECTED,
|
||||
hf__quote_mem_str(src, tokens[i].location.start,
|
||||
tokens[i].location.end, true),
|
||||
true);
|
||||
|
||||
case HF__TOKEN_TYPE__DASH_COMMENT:
|
||||
hf__parser__node_array_push(nodes, len, size,
|
||||
(struct hf__node){
|
||||
.type = HF__NODE_TYPE__DASH_COMMENT,
|
||||
.value =
|
||||
{
|
||||
.comment = strip_whitespaces(
|
||||
src, tokens[i].location.start,
|
||||
tokens[i].location.end),
|
||||
},
|
||||
});
|
||||
break;
|
||||
|
||||
case HF__TOKEN_TYPE__BACKSLASH_COMMENT:
|
||||
hf__parser__node_array_push(nodes, len, size,
|
||||
(struct hf__node){
|
||||
.type = HF__NODE_TYPE__DASH_COMMENT,
|
||||
.value =
|
||||
{
|
||||
.comment = strip_whitespaces(
|
||||
src, tokens[i].location.start,
|
||||
tokens[i].location.end),
|
||||
},
|
||||
});
|
||||
break;
|
||||
|
||||
case HF__TOKEN_TYPE__PAREN_COMMENT:
|
||||
hf__parser__node_array_push(nodes, len, size,
|
||||
(struct hf__node){
|
||||
.type = HF__NODE_TYPE__PAREN_COMMENT,
|
||||
.value =
|
||||
{
|
||||
.comment = strip_whitespaces(
|
||||
src, tokens[i].location.start,
|
||||
tokens[i].location.end),
|
||||
},
|
||||
});
|
||||
break;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return HF__OK;
|
||||
}
|
|
@@ -1,61 +1,26 @@
|
|||
#include <malloc.h>
|
||||
#include "ansi_lib.h"
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#include "hydroforth/hydroforth.h"
|
||||
|
||||
void hydroforth__result__add_backtrace(HYDROFORTH__RESULT *const result, const char *const s)
|
||||
{
|
||||
result->backtrace = realloc(result->backtrace, sizeof(char *) * (result->backtrace_len + 1));
|
||||
result->backtrace[result->backtrace_len++] = s;
|
||||
}
|
||||
void hf__print_error(struct hf__error_wrapper *error) {
|
||||
SET_8_VALUE_COLOUR(TXT_RED);
|
||||
printf("Error: %s", HF__ERROR_STR[error->error]);
|
||||
|
||||
void hydroforth__result__set(HYDROFORTH__RESULT *const result, HYDROFORTH__ERROR error, const char *const s)
|
||||
{
|
||||
result->error = error;
|
||||
hydroforth__result__add_backtrace(result, s);
|
||||
}
|
||||
|
||||
const char *hydroforth__result__get_error_message(HYDROFORTH__ERROR error)
|
||||
{
|
||||
switch (error)
|
||||
{
|
||||
case OK:
|
||||
return "OK";
|
||||
|
||||
case ERR_UNKNOWN:
|
||||
return "Unknown error";
|
||||
|
||||
case ERR_INVALID_HEX_CHAR:
|
||||
return "Invalid hexadecimal char in number";
|
||||
|
||||
case ERR_UNKNOWN_SINGLE_CHAR_WORD:
|
||||
case ERR_UNKNOWN_WORD:
|
||||
return "Unknown word";
|
||||
|
||||
case ERR_UNTERMINATED_WORD_DEFINITION:
|
||||
return "Unterminated word definition";
|
||||
|
||||
case ERR_WORD_NAME_CANT_BE_NUMBER:
|
||||
return "Word name can't be a number";
|
||||
|
||||
case ERR_WORD_DEF_INSIDE_WORD_DEF:
|
||||
return "Can't define a word inside a word definition";
|
||||
|
||||
default:
|
||||
return "???";
|
||||
if (error->msg) {
|
||||
printf(": ");
|
||||
SET_8_VALUE_COLOUR(TXT_GREEN);
|
||||
printf("%s", error->msg);
|
||||
SET_8_VALUE_COLOUR(TXT_RED);
|
||||
if (error->msg_is_freeable) {
|
||||
free(error->msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
putchar(' ');
|
||||
|
||||
int hydroforth__result__unwrap(HYDROFORTH__RESULT *const result, int code)
|
||||
{
|
||||
printf("BACKTRACE LEN: %u\n", result->backtrace_len);
|
||||
puts("ERROR! Backtrace:");
|
||||
printf("Error: %u -> \"%s\"\n", result->error, hydroforth__result__get_error_message(result->error));
|
||||
for (unsigned short i = 0; i < result->backtrace_len; i++)
|
||||
{
|
||||
printf("%u: %s\n", result->backtrace_len - i - 1, result->backtrace[i]);
|
||||
}
|
||||
printf("EXITING with code: %u!\n", code);
|
||||
free(result->backtrace);
|
||||
SET_8_VALUE_COLOUR(TXT_YELLOW);
|
||||
printf("[0x%x]", error->error);
|
||||
|
||||
return code;
|
||||
}
|
||||
SET_8_VALUE_COLOUR(TXT_DEFAULT);
|
||||
}
|
src/main.c (405 changed lines)
@@ -1,92 +1,345 @@
|
|||
#include <stdio.h>
|
||||
#include "ansi_lib.h"
|
||||
#include <argp.h>
|
||||
#include <fcntl.h>
|
||||
#include <readline/history.h>
|
||||
#include <readline/readline.h>
|
||||
#include <stdbool.h>
|
||||
#include <malloc.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <unistd.h>
|
||||
|
||||
#include "hydroforth/hydroforth.h"
|
||||
|
||||
struct ReadSrcResult
|
||||
{
|
||||
bool success;
|
||||
char *const src;
|
||||
const char *argp_program_version = "hydroforth " HF__VERSION;
|
||||
const char *argp_program_bug_address = "<dominic@dergrimm.net>";
|
||||
static char doc[] = "Hydroforth is a minimal Forth interpreter written in C";
|
||||
|
||||
static char args_doc[] = "[SRC]";
|
||||
|
||||
static struct argp_option options[] = {
|
||||
{"debug", 'd', 0, 0, "Prints debug information"},
|
||||
{"shell", 's', 0, 0, "Starts Forth interpreter shell"},
|
||||
{"output", 'o', "FILE", 0, "Output to FILE instead of standard output"},
|
||||
{0},
|
||||
};
|
||||
|
||||
struct ReadSrcResult read_src(FILE *fp)
|
||||
{
|
||||
if (fseek(fp, 0L, SEEK_END) != 0)
|
||||
{
|
||||
fputs("Error seeking to file end!\n", stderr);
|
||||
return (struct ReadSrcResult){
|
||||
.success = false,
|
||||
};
|
||||
}
|
||||
const long bufsize = ftell(fp);
|
||||
if (bufsize == -1)
|
||||
{
|
||||
fputs("Error getting file size!\n", stderr);
|
||||
return (struct ReadSrcResult){
|
||||
.success = false,
|
||||
};
|
||||
}
|
||||
char *const src = malloc(sizeof(char) * bufsize);
|
||||
if (fseek(fp, 0L, SEEK_SET) != 0)
|
||||
{
|
||||
fputs("Error rewinding file to start!\n", stderr);
|
||||
return (struct ReadSrcResult){
|
||||
.success = false,
|
||||
};
|
||||
}
|
||||
fread(src, sizeof(char), bufsize, fp);
|
||||
if (ferror(fp) != 0)
|
||||
{
|
||||
fputs("Error reading file!\n", stderr);
|
||||
return (struct ReadSrcResult){
|
||||
.success = false,
|
||||
};
|
||||
}
|
||||
struct arguments {
|
||||
char *args[1];
|
||||
bool debug;
|
||||
bool shell;
|
||||
char *output_file;
|
||||
};
|
||||
|
||||
return (struct ReadSrcResult){
|
||||
.success = true,
|
||||
.src = src,
|
||||
};
|
||||
static error_t parse_opt(int key, char *arg, struct argp_state *state) {
|
||||
struct arguments *arguments = state->input;
|
||||
|
||||
switch (key) {
|
||||
case 'd':
|
||||
arguments->debug = true;
|
||||
break;
|
||||
|
||||
case 's':
|
||||
arguments->shell = true;
|
||||
break;
|
||||
|
||||
case 'o':
|
||||
arguments->output_file = arg;
|
||||
break;
|
||||
|
||||
case ARGP_KEY_ARG:
|
||||
if (state->arg_num >= 1)
|
||||
/* Too many arguments. */
|
||||
argp_usage(state);
|
||||
|
||||
arguments->args[state->arg_num] = arg;
|
||||
|
||||
break;
|
||||
|
||||
case ARGP_KEY_END:
|
||||
// if (state->arg_num < 1)
|
||||
// /* Not enough arguments. */
|
||||
// argp_usage(state);
|
||||
break;
|
||||
|
||||
default:
|
||||
return ARGP_ERR_UNKNOWN;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
int main(int argc, char *argv[])
|
||||
{
|
||||
if (argc < 2)
|
||||
{
|
||||
fputs("No source file specified!\n", stderr);
|
||||
return 1;
|
||||
static struct argp argp = {options, parse_opt, args_doc, doc};
|
||||
|
||||
bool read_src(FILE *fp, char **src, size_t *const len) {
|
||||
if (fseek(fp, 0L, SEEK_END) != 0) {
|
||||
fputs("Error seeking to file end!\n", stderr);
|
||||
return false;
|
||||
}
|
||||
FILE *fp = fopen(argv[1], "r");
|
||||
if (fp == NULL)
|
||||
{
|
||||
fputs("Error opening file!\n", stderr);
|
||||
return 1;
|
||||
const size_t bufsize = ftell(fp);
|
||||
if (bufsize == -1) {
|
||||
fputs("Error getting file size!\n", stderr);
|
||||
return false;
|
||||
}
|
||||
const struct ReadSrcResult res = read_src(fp);
|
||||
fclose(fp);
|
||||
if (res.success)
|
||||
{
|
||||
HYDROFORTH__INTERPRETER interpreter = {
|
||||
.src = res.src,
|
||||
.pos = 0,
|
||||
.single_char_word_keys_len = 0,
|
||||
.word_keys_len = 0,
|
||||
.word_definitions_len = 0,
|
||||
};
|
||||
HYDROFORTH__RESULT result = {.error = OK, .backtrace_len = 0};
|
||||
hydroforth.run(&result, &interpreter);
|
||||
free(interpreter.src);
|
||||
if (result.error)
|
||||
{
|
||||
hydroforth__add_func_backtrace(&result);
|
||||
return hydroforth.result.unwrap(&result, 1);
|
||||
*len = bufsize != 0 ? bufsize - 1 : 0;
|
||||
*src = malloc(sizeof(char) * bufsize);
|
||||
if (fseek(fp, 0L, SEEK_SET) != 0) {
|
||||
fputs("Error rewinding file to start!\n", stderr);
|
||||
return false;
|
||||
}
|
||||
fread(*src, sizeof(char), bufsize, fp);
|
||||
if (ferror(fp) != 0) {
|
||||
fputs("Error reading file!\n", stderr);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
struct hf__result shell(const struct arguments *const arguments) {
|
||||
SET_8_VALUE_COLOUR(TXT_GREEN);
|
||||
printf("hydroforth@%s shell\n\n", HF__VERSION);
|
||||
SET_8_VALUE_COLOUR(TXT_DEFAULT);
|
||||
|
||||
struct hf__parser parser = {
|
||||
.keyword_map = NULL,
|
||||
.keyword_map_is_init = false,
|
||||
};
|
||||
|
||||
struct hf__interpreter interpreter = {
|
||||
.call_stack = malloc(sizeof(struct hf__node) * 10),
|
||||
.call_stack_len = 0,
|
||||
.call_stack_size = 10,
|
||||
|
||||
.words =
|
||||
{
|
||||
.arr = calloc(HF__INTERPRETER__WORDS_CAP,
|
||||
sizeof(struct hf__hashmap__node *)),
|
||||
.cap = HF__INTERPRETER__WORDS_CAP,
|
||||
},
|
||||
|
||||
.stack = malloc(sizeof(long) * 10),
|
||||
.stack_len = 0,
|
||||
.stack_size = 10,
|
||||
|
||||
.is_running = true,
|
||||
};
|
||||
|
||||
using_history();
|
||||
|
||||
char *input;
|
||||
while ((input = readline("hydroforth> ")) != NULL) {
|
||||
if (*input) {
|
||||
add_history(input);
|
||||
}
|
||||
|
||||
size_t tokens_len = 0;
|
||||
size_t tokens_size = 0;
|
||||
struct hf__token *tokens = malloc(sizeof(struct hf__token) * tokens_size);
|
||||
hf__lex(input, strlen(input), &tokens, &tokens_len, &tokens_size);
|
||||
|
||||
if (tokens_len == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
size_t nodes_len = 0;
|
||||
size_t nodes_size = 0;
|
||||
struct hf__node *nodes = NULL;
|
||||
struct hf__result parse_res = hf__parse(&parser, input, tokens, tokens_len,
|
||||
&nodes, &nodes_len, &nodes_size);
|
||||
free(tokens);
|
||||
free(input);
|
||||
if (!parse_res.ok) {
|
||||
hf__print_error(&parse_res.error);
|
||||
putchar('\n');
|
||||
continue;
|
||||
}
|
||||
|
||||
for (size_t i = nodes_len - 1; i != 0 - 1; i--) {
|
||||
hf__parser__node_array_push(&interpreter.call_stack,
|
||||
&interpreter.call_stack_len,
|
||||
&interpreter.call_stack_size, nodes[i]);
|
||||
}
|
||||
free(nodes);
|
||||
|
||||
SET_8_VALUE_COLOUR(TXT_GREEN);
|
||||
printf("=> ");
|
||||
SET_8_VALUE_COLOUR(TXT_DEFAULT);
|
||||
while (interpreter.call_stack_len != 0 && interpreter.is_running) {
|
||||
struct hf__result res = hf__interpreter__run(&interpreter);
|
||||
if (!res.ok) {
|
||||
hf__print_error(&res.error);
|
||||
putchar('\n');
|
||||
}
|
||||
}
|
||||
putchar('\n');
|
||||
|
||||
if (interpreter.stack_len != 0) {
|
||||
printf("stack:");
|
||||
SET_8_VALUE_COLOUR(TXT_YELLOW);
|
||||
for (size_t i = 0; i < interpreter.stack_len; i++) {
|
||||
printf(" %li", interpreter.stack[i]);
|
||||
}
|
||||
SET_8_VALUE_COLOUR(TXT_DEFAULT);
|
||||
putchar('\n');
|
||||
}
|
||||
|
||||
if (!interpreter.is_running) {
|
||||
SET_GRAPHIC_MODE(DIM_MODE);
|
||||
printf("\nexiting...\n");
|
||||
RESET_GRAPHICS_MODES;
|
||||
break;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
free(res.src);
|
||||
return 1;
|
||||
|
||||
// free(interpreter.call_stack);
|
||||
// free(interpreter.stack);
|
||||
hf__interpreter__free(&interpreter);
|
||||
|
||||
return HF__OK;
|
||||
}
|
||||
|
||||
int main(int argc, char *argv[]) {
|
||||
struct arguments arguments;
|
||||
|
||||
arguments.args[0] = NULL;
|
||||
arguments.debug = false;
|
||||
arguments.shell = false;
|
||||
arguments.output_file = NULL;
|
||||
|
||||
argp_parse(&argp, argc, argv, 0, 0, &arguments);
|
||||
|
||||
int fd;
|
||||
if (arguments.output_file) {
|
||||
fd = open(arguments.output_file, O_WRONLY | O_CREAT, 0644);
|
||||
printf("fd = %i\n", fd);
|
||||
if (fd == -1) {
|
||||
perror("open failed");
|
||||
return 1;
|
||||
}
|
||||
if (dup2(fd, 1) == -1) {
|
||||
perror("dup2 failed");
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
if (arguments.shell) {
|
||||
struct hf__result res = shell(&arguments);
|
||||
|
||||
if (arguments.output_file) {
|
||||
close(fd);
|
||||
}
|
||||
|
||||
if (!res.ok) {
|
||||
hf__print_error(&res.error);
|
||||
putchar('\n');
|
||||
return 1;
|
||||
}
|
||||
} else {
|
||||
FILE *fp = fopen(arguments.args[0], "r");
|
||||
if (fp == NULL) {
|
||||
fputs("Error opening file!\n", stderr);
|
||||
return 1;
|
||||
}
|
||||
char *src;
|
||||
size_t src_len;
|
||||
const bool read_res = read_src(fp, &src, &src_len);
|
||||
fclose(fp);
|
||||
if (!read_res) {
|
||||
fputs("Error reading file!\n", stderr);
|
||||
return 1;
|
||||
}
|
||||
|
||||
struct hf__parser parser = {
|
||||
.keyword_map = NULL,
|
||||
.keyword_map_is_init = false,
|
||||
};
|
||||
|
||||
struct hf__interpreter interpreter = {
|
||||
.call_stack = malloc(sizeof(struct hf__node) * 10),
|
||||
.call_stack_len = 0,
|
||||
.call_stack_size = 10,
|
||||
|
||||
.words =
|
||||
{
|
||||
.arr = calloc(HF__INTERPRETER__WORDS_CAP,
|
||||
sizeof(struct hf__hashmap__node *)),
|
||||
.cap = HF__INTERPRETER__WORDS_CAP,
|
||||
},
|
||||
|
||||
.stack = malloc(sizeof(long) * 10),
|
||||
.stack_len = 0,
|
||||
.stack_size = 10,
|
||||
|
||||
.is_running = true,
|
||||
};
|
||||
|
||||
size_t tokens_len = 0;
|
||||
size_t tokens_size = 0;
|
||||
struct hf__token *tokens = malloc(sizeof(struct hf__token) * tokens_size);
|
||||
hf__lex(src, src_len, &tokens, &tokens_len, &tokens_size);
|
||||
|
||||
size_t nodes_len = 0;
|
||||
size_t nodes_size = 0;
|
||||
struct hf__node *nodes = NULL;
|
||||
struct hf__result parse_res = hf__parse(&parser, src, tokens, tokens_len,
|
||||
&nodes, &nodes_len, &nodes_size);
|
||||
if (!parse_res.ok) {
|
||||
hf__print_error(&parse_res.error);
|
||||
putchar('\n');
|
||||
return 1;
|
||||
}
|
||||
free(tokens);
|
||||
free(src);
|
||||
|
||||
for (size_t i = nodes_len - 1; i != 0 - 1; i--) {
|
||||
hf__parser__node_array_push(&interpreter.call_stack,
|
||||
&interpreter.call_stack_len,
|
||||
&interpreter.call_stack_size, nodes[i]);
|
||||
}
|
||||
free(nodes);
|
||||
|
||||
bool err = false;
|
||||
while (interpreter.call_stack_len != 0 && interpreter.is_running) {
|
||||
struct hf__result res = hf__interpreter__run(&interpreter);
|
||||
if (!res.ok) {
|
||||
hf__print_error(&res.error);
|
||||
putchar('\n');
|
||||
|
||||
if (arguments.debug) {
|
||||
SET_8_VALUE_COLOUR(TXT_RED);
|
||||
puts("\n=== DEBUG ===");
|
||||
|
||||
SET_8_VALUE_COLOUR(TXT_DEFAULT);
|
||||
printf("stack:");
|
||||
SET_8_VALUE_COLOUR(TXT_YELLOW);
|
||||
for (size_t i = 0; i < interpreter.stack_len; i++) {
|
||||
printf(" %li", interpreter.stack[i]);
|
||||
}
|
||||
SET_8_VALUE_COLOUR(TXT_DEFAULT);
|
||||
putchar('\n');
|
||||
}
|
||||
|
||||
err = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (parser.keyword_map_is_init) {
|
||||
hf__hashmap__free(&parser.keyword_map, NULL);
|
||||
}
|
||||
|
||||
// free(interpreter.call_stack);
|
||||
// free(interpreter.stack);
|
||||
hf__interpreter__free(&interpreter);
|
||||
|
||||
if (arguments.output_file) {
|
||||
close(fd);
|
||||
}
|
||||
|
||||
if (err) {
|
||||
return 1;
|
||||
} else if (!interpreter.is_running) {
|
||||
return interpreter.exit_code;
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
|