Upgrades upgrades upgrades

This commit is contained in:
parent 2def36a617
commit 366ac05225

23 changed files with 495 additions and 594 deletions

Cargo.lock (generated, 55 lines changed)
@@ -33,9 +33,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clap"
version = "3.2.12"
version = "3.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab8b79fe3946ceb4a0b1c080b4018992b8d27e9ff363644c1c9b6387c854614d"
checksum = "a3dbbb6653e7c55cc8595ad3e1f7be8f32aba4eb7ff7f0fd1163d4f3d137c0a9"
dependencies = [
"atty",
"bitflags",

@@ -50,9 +50,9 @@ dependencies = [
[[package]]
name = "clap_derive"
version = "3.2.7"
version = "3.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "759bf187376e1afa7b85b959e6a664a3e7a95203415dba952ad19139e798f902"
checksum = "9ba52acd3b0a5c33aeada5cdaa3267cdc7c594a98731d4268cdc1532f4264cb4"
dependencies = [
"heck",
"proc-macro-error",

@@ -70,12 +70,32 @@ dependencies = [
"os_str_bytes",
]

[[package]]
name = "console"
version = "0.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89eab4d20ce20cea182308bca13088fecea9c05f6776cf287205d41a0ed3c847"
dependencies = [
"encode_unicode",
"libc",
"once_cell",
"terminal_size",
"unicode-width",
"winapi",
]

[[package]]
name = "either"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be"

[[package]]
name = "encode_unicode"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"

[[package]]
name = "getrandom"
version = "0.2.7"

@@ -104,6 +124,7 @@ name = "hence"
version = "0.1.0"
dependencies = [
"clap",
"console",
"itertools",
"num-parse",
"radix_fmt",

@@ -111,16 +132,6 @@ dependencies = [
"rhexdump",
]

[[package]]
name = "hencelisp"
version = "0.1.0"
dependencies = [
"clap",
"hence",
"itertools",
"num-parse",
]

[[package]]
name = "hermit-abi"
version = "0.1.19"

@@ -368,6 +379,16 @@ dependencies = [
"winapi-util",
]

[[package]]
name = "terminal_size"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df"
dependencies = [
"libc",
"winapi",
]

[[package]]
name = "textwrap"
version = "0.15.0"

@@ -380,6 +401,12 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15c61ba63f9235225a22310255a29b806b907c9b8c964bcbd0a2c70f3f2deea7"

[[package]]
name = "unicode-width"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"

[[package]]
name = "version_check"
version = "0.9.4"
@@ -1,2 +1,2 @@
[workspace]
members = ["hence", "hencelisp"]
members = ["hence"]
README.md (33 lines changed)

@@ -10,20 +10,19 @@

# Opcodes

| Index | Name | Description | Arguments |
| ------ | ------ | ------------------------------------------ | --------- |
| `0x00` | `nop` | No operation | |
| `0x01` | `push` | Push to stack | |
| `0x02` | `pop` | Pops top of stack | |
| `0x03` | `ts` | Store value into `tmp` | |
| `0x04` | `tsr` | Store register's value into `tmp` | |
| `0x05` | `tss` | Stores top of stack into `tmp` | |
| `0x06` | `tlr` | Load `tmp`'s value into register | |
| `0x07` | `tls` | Push value of `tmp` to stack | |
| `0x08` | `ld` | Loads top of stack into register | |
| `0x09` | `st` | Loads register's value onto top of stack | |
| `0x0a` | `dbg` | Debug | |
| `0x0b` | `alu` | Runs ALU | |
| `0x0c` | `at` | Runs ALU with `tmp`'s value as operation | |
| `0x0d` | `get` | Sets `tmp` to memory at address in `tmp` | |
| `0x0e` | `set` | Sets memory to value at specific address | |

| Index | Name | Description | Arguments |
| ------ | ------ | ------------------------------------------------------------------ | --------- |
| `0x00` | `nop` | No operation | |
| `0x01` | `push` | Push to stack | |
| `0x02` | `pop` | Pops top of stack | |
| `0x03` | `ts` | Store value into `tmp` | |
| `0x04` | `tsr` | Store register's value into `tmp` | |
| `0x05` | `tss` | Stores top of stack into `tmp` | |
| `0x06` | `tlr` | Load `tmp`'s value into register | |
| `0x07` | `tlrc` | Same as `tlr` but only executes if register `a`'s first bit is `1` | |
| `0x08` | `tls` | Push value of `tmp` to stack | |
| `0x09` | `ld` | Loads top of stack into register | |
| `0x0a` | `dbg` | Debug | |
| `0x0b` | `alu` | Runs ALU with `tmp`'s value as operator | |
| `0x0c` | `get` | Sets `tmp` to memory at address in `tmp` | |
| `0x0d` | `set` | Sets memory to value at specific address | |
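The new table drops the separate `st` and `at` opcodes, inserts the conditional `tlrc` at `0x07`, and shifts everything after it down by one. Below is a minimal decoding sketch that mirrors the table above; it is illustrative Rust, not the crate's actual emulator code, and the high-bit masking follows the assembler's `0b10000000` "no argument" flag seen later in this commit.

/// Mnemonics indexed by opcode, matching the table above (illustrative only).
const MNEMONICS: [&str; 14] = [
    "nop", "push", "pop", "ts", "tsr", "tss", "tlr", "tlrc",
    "tls", "ld", "dbg", "alu", "get", "set",
];

/// Decode one program byte into its mnemonic. The assembler sets the high bit
/// on instructions that carry no argument, so mask it off before indexing.
fn mnemonic(byte: u8) -> Option<&'static str> {
    MNEMONICS.get((byte & 0b0111_1111) as usize).copied()
}

fn main() {
    assert_eq!(mnemonic(0x07), Some("tlrc"));
    assert_eq!(mnemonic(0x80), Some("nop")); // 0x00 with the "no argument" flag set
}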
examples/forth.asm (new file, 147 lines)

@@ -0,0 +1,147 @@
@ hence core lib
core:
core_mem:
.define CORE_MEM_PRG, (0 * 1024)
.define CORE_MEM_ST, (32 * 1024)
.define CORE_MEM_MEM, (40 * 1024)
.define CORE_MEM_OUT, (56 * 1024)
.define CORE_MEM_CHR, (56 * 1024 + 1)
.define CORE_MEM_KEY, (56 * 1024 + 2)

core_reg:
.define CORE_REG_PC, 0x0
.define CORE_REG_OPC, 0x1
.define CORE_REG_ARG, 0x2
.define CORE_REG_S, 0x3
.define CORE_REG_SP, 0x4
.define CORE_REG_A, 0x5
.define CORE_REG_B, 0x6
.define CORE_REG_C, 0x7
.define CORE_REG_D, 0x8

core_alu:
.define CORE_ALU_NOT, 0x00
.define CORE_ALU_AND, 0x01
.define CORE_ALU_OR, 0x02
.define CORE_ALU_XOR, 0x03
.define CORE_ALU_LSH, 0x04
.define CORE_ALU_RSH, 0x05
.define CORE_ALU_ADD, 0x06
.define CORE_ALU_SUB, 0x07
.define CORE_ALU_MUL, 0x08
.define CORE_ALU_DIV, 0x09
.define CORE_ALU_CMP, 0x0a
.define CORE_ALU_EQ, 0x0b
.define CORE_ALU_LT, 0x0c
.define CORE_ALU_GT, 0x0d
.define CORE_ALU_LEQ, 0x0e
.define CORE_ALU_GEQ, 0x0f
.define CORE_ALU_BOL, 0x10
.define CORE_ALU_INV, 0x11
.define CORE_ALU_RND, 0x12

@ hence standard lib
STD:
.define STD_U8_MAX, 0xff
.define STD_U16_MAX, 0xffff

.macro std_stop
ts 0xffff
tlr CORE_REG_PC
.macroend

forth:
.define FORTH_MEM_INPUT_SIZE, 16
.define FORTH_MEM_INPUT_DYN_SIZE, CORE_MEM_MEM
.define FORTH_MEM_INPUT_START, (FORTH_MEM_INPUT_DYN_SIZE + 1)
.define FORTH_MEM_INPUT_END, (FORTH_MEM_INPUT_START + FORTH_MEM_INPUT_SIZE)

.define jump_main, (CORE_MEM_ST - 3 - 1)

ts jump_main
tlr CORE_REG_PC

end_input:
dbg

main:
@ loop body
loop:
@ read key from stdin
ts CORE_MEM_KEY
get

@ check if key is newline (0x0a)
tlr CORE_REG_D @ store in register D because register A is used later on
tlr CORE_REG_A @ store in register A as input for ALU
ts "\n" @ store newline in TMP
tlr CORE_REG_B @ store newline in register B as input for ALU
ts CORE_ALU_EQ @ ALU equal operation
alu @ run ALU

@ go back to loop start if pressed key is newline
tlr CORE_REG_A @ store result of ALU operation in TMP
@ ts loop @ load memory address of loop start into TMP
ts 0
tls
ts jump_switch
tls
ts end_input
tlrc CORE_REG_PC @ set register PC to loop start address if result is true
pop

tlr CORE_REG_A
ts 0
tlrc CORE_REG_C

@ print out char
tsr CORE_REG_D @ get char
tlr CORE_REG_A @ load char into register A
ts CORE_MEM_CHR @ set TMP to char print memory address
set @ print char

@ increment counter by one
tsr CORE_REG_C
tlr CORE_REG_A
ts 1
tlr CORE_REG_B
ts CORE_ALU_ADD
alu
tlr CORE_REG_C
tlr CORE_REG_A

ts FORTH_MEM_INPUT_SIZE
tlr CORE_REG_B
ts CORE_ALU_LT
alu
tlr CORE_REG_A
ts loop
tlrc CORE_REG_PC

ts "\n"
tlr CORE_REG_A
ts CORE_MEM_CHR
set

tsr CORE_REG_C
tls
dbg
pop

ts 0
tlr CORE_REG_C
ts loop
tlr CORE_REG_PC

@ .std_stop

@ set PC to maximum for u16 and therefore stops program execution
ts 0xffff @ load 0xffff into TMP
tlr CORE_REG_PC @ store value of TMP into register PC

jump_switch:
dbg

.org jump_main
ts main
tlr CORE_REG_PC
examples/forth.bin (new binary file): Binary file not shown.
@@ -1,22 +0,0 @@
main:
push 40
push (42 - 40)

tss
tlr 0x5
pop
tss
tlr 0x6
pop

alu 0x06
tls

alu 0x12
tlr 0x5
tls

dbg

ts 0xffff
tlr 0x0
@@ -1,5 +0,0 @@
;; main function
(defun main ()
"Program entrypoint"
(let ((x (+ 40 2))))
(debug x))
@@ -16,7 +16,8 @@ path = "src/bin/main.rs"
[dependencies]
itertools = "0.10.2"
num-parse = "0.1.2"
clap = { version = "3.2.12", features = ["derive"] }
clap = { version = "3.2.16", features = ["derive"] }
rhexdump = "0.1.1"
radix_fmt = "1"
rand = "0.8.5"
console = "0.15.1"
@@ -60,11 +60,14 @@ fn main() {
let tokens = lexer::lex(assembly).unwrap();
let ast = parser::parse(tokens).unwrap();

let mut data = assembler::Data::new(match Path::new(&src).parent().unwrap().to_str() {
Some(x) => x.to_string(),
_ => panic!("Could not get directory in which source code resides"),
});
assembler::assemble(ast, &mut data).unwrap();
let mut data = assembler::Data::new(
match Path::new(&src).parent().unwrap().to_str() {
Some(x) => x.to_string(),
_ => panic!("Could not get directory in which source code resides"),
},
ast.body,
);
assembler::assemble(&mut data).unwrap();

match bin {
Some(x) => {
@@ -36,10 +36,12 @@ impl assembler::ByteResolvable<assembler::Data> for Arg {
fn resolve_number(&self, data: &mut assembler::Data) -> Result<u16, String> {
match self {
Arg::String(x) => {
if x.len() == 1 {
Ok(x.as_bytes()[0] as u16)
let y = x.replace("\\n", "\n");

if y.len() == 1 {
Ok(y.as_bytes()[0] as u16)
} else {
let bytes = x.as_bytes();
let bytes = y.as_bytes();

Ok(((bytes[0] as u16) << 8) | bytes[1] as u16)
}

@@ -47,7 +49,7 @@ impl assembler::ByteResolvable<assembler::Data> for Arg {
Arg::Number(x) => Ok(*x),
Arg::Variable(x) => {
let mut name = x.clone();
let mut arg: Option<arg::Arg> = None;
let mut arg: Option<arg::Arg>;

loop {
arg = match data.contants.get(&name) {

@@ -57,7 +59,7 @@ impl assembler::ByteResolvable<assembler::Data> for Arg {
match arg {
Some(a) => {
arg = Some(a.clone());
// arg = Some(a.clone());
match a {
arg::Arg::Variable(n) => {
name = n;

@@ -86,11 +88,11 @@ impl assembler::ByteResolvable<assembler::Data> for Arg {
fn resolve_bytes(&self, data: &mut assembler::Data) -> Result<Vec<u8>, String> {
match self {
Arg::String(x) => Ok(x.bytes().collect()),
Arg::String(x) => Ok(x.replace("\\n", "\n").bytes().collect()),
Arg::Number(x) => Ok(vec![(*x >> 8) as u8, *x as u8]),
Arg::Variable(x) => {
let mut name = x.clone();
let mut arg: Option<arg::Arg> = None;
let mut arg: Option<arg::Arg>;

loop {
dbg!(&name);

@@ -102,7 +104,7 @@ impl assembler::ByteResolvable<assembler::Data> for Arg {
match arg {
Some(a) => {
arg = Some(a.clone());
// arg = Some(a.clone());
match a {
arg::Arg::Variable(n) => {
name = n;

@@ -148,22 +150,20 @@ impl assembler::ToCode for BinaryExpressionOperator {
}
}

pub fn parse_binary_operation(
token: &lexer::Token,
) -> Result<BinaryExpressionOperator, &'static str> {
pub fn parse_binary_operation(token: &lexer::Token) -> Result<BinaryExpressionOperator, String> {
match token {
lexer::Token::Add => Ok(BinaryExpressionOperator::Add),
lexer::Token::Sub => Ok(BinaryExpressionOperator::Sub),
lexer::Token::Mul => Ok(BinaryExpressionOperator::Mul),
lexer::Token::Div => Ok(BinaryExpressionOperator::Div),
lexer::Token::Pow => Ok(BinaryExpressionOperator::Pow),
_ => Err("Invalid binary expression operator"),
_ => Err("Invalid binary expression operator".to_string()),
}
}

pub fn parse_binary_expression_arg(tokens: &mut Vec<&&lexer::Token>) -> Result<Arg, &'static str> {
pub fn parse_binary_expression_arg(tokens: &mut Vec<&&lexer::Token>) -> Result<Arg, String> {
if tokens.is_empty() {
return Err("Malformed binary expression");
return Err("Malformed binary expression".to_string());
}

let mut args: Vec<&&lexer::Token> = tokens.drain(..3).collect();

@@ -202,6 +202,7 @@ pub fn parse_args(tokens: Vec<&lexer::Token>) -> Result<Vec<Arg>, &str> {
}

match token {
lexer::Token::Comment(_) => {}
lexer::Token::StringLiteral(x) => {
args.push(Arg::String(x.clone()));
}
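For context, the `resolve_number` change above means a string argument is unescaped before it becomes a word: "\n" resolves to 0x000a, and a two-character literal packs big-endian into one 16-bit value. A standalone sketch of that rule follows; the free function name is assumed for illustration and is not the crate's API.

// Standalone sketch of the unescape-then-pack rule from resolve_number above.
// Assumes a non-empty literal, as the original code does.
fn resolve_string(x: &str) -> u16 {
    let y = x.replace("\\n", "\n"); // turn the escape sequence into a real newline
    let bytes = y.as_bytes();
    if bytes.len() == 1 {
        bytes[0] as u16
    } else {
        // first two bytes, big-endian
        ((bytes[0] as u16) << 8) | bytes[1] as u16
    }
}

fn main() {
    assert_eq!(resolve_string("\\n"), 0x000a); // the two characters '\' and 'n'
    assert_eq!(resolve_string("ab"), 0x6162);
}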
@@ -15,163 +15,255 @@ pub trait ByteResolvable<T> {
fn resolve_bytes(&self, data: &mut T) -> Result<Vec<u8>, String>;
}

#[derive(Debug)]
pub struct Macro {
pub args: Vec<String>,
pub body: Vec<parser::ast::Node>,
}

#[derive(Debug)]
pub enum State {
Default,
Macro { name: String, depth: usize },
}

#[derive(Debug)]
pub struct Data {
pub dir: String,
pub body: Vec<parser::ast::Node>,
pub program: [u8; 32 * 1024],
pub offset: u16,
pub contants: HashMap<String, arg::Arg>,
pub macros: HashMap<String, Macro>,
pub state: State,
}

impl Data {
pub fn new(dir: String) -> Self {
pub fn new(dir: String, body: Vec<parser::ast::Node>) -> Self {
Self {
dir,
body,
program: [0; 32 * 1024],
offset: 0,
contants: HashMap::new(),
macros: HashMap::new(),
state: State::Default,
}
}
}

pub fn assemble(ast: parser::ast::AST, data: &mut Data) -> Result<(), String> {
for node in ast.body {
pub fn assemble(data: &mut Data) -> Result<(), String> {
for node in data.body.clone() {
data.contants
.insert("OFFSET".to_string(), arg::Arg::Number(data.offset));

match node {
parser::ast::Node::Comment(_) => {}
parser::ast::Node::Label(x) => {
if data.contants.contains_key(&x) {
return Err(format!("Label already exists: '{x}'"));
}

data.contants.insert(x, arg::Arg::Number(data.offset));
}
parser::ast::Node::Call { name, arg } => {
let arg_num = match arg {
Some(x) => x.resolve_number(data).unwrap(),
_ => 0,
};

data.program[data.offset as usize] = match name.as_str() {
"nop" => 0x00,
"push" => 0x01,
"pop" => 0x02,
"ts" => 0x03,
"tsr" => 0x04,
"tss" => 0x05,
"tlr" => 0x06,
"tls" => 0x07,
"ld" => 0x08,
"st" => 0x09,
"dbg" => 0x0a,
"alu" => 0x0b,
"at" => 0x0c,
"get" => 0x0d,
"set" => 0x0e,
_ => return Err(format!("Unknown opcode: '{name}'")),
};
if arg_num == 0 {
data.program[data.offset as usize] |= 0b10000000;
}
data.offset += 1;

if arg_num != 0 {
data.program[data.offset as usize] = (arg_num >> 8) as u8;
data.offset += 1;

data.program[data.offset as usize] = arg_num as u8;
data.offset += 1;
}
}
parser::ast::Node::MacroCall { name, args } => {
match name.as_str() {
"debug" => {
for arg in args {
let bytes = arg.resolve_bytes(data).unwrap();

println!("{}", arg.to_code().replace('\n', "\\n"));
println!(" => {}", arg.resolve_number(data).unwrap());
println!(
" => [{}]",
bytes
.iter()
.map(|n| {
let num = radix(*n, 16).to_string();
format!(
"0x{}{}",
"00".chars().take(2 - num.len()).collect::<String>(),
num
)
})
.join(", ")
);
println!(
" => \"{}\"",
String::from_utf8(bytes).unwrap().replace('\n', "\\n")
);
match &data.state {
State::Default => {
match node {
parser::ast::Node::Comment(_) => {}
parser::ast::Node::Label(x) => {
if data.contants.contains_key(&x) {
return Err(format!("Label already exists: '{x}'"));
}
println!("==========");

data.contants
.insert(x.to_string(), arg::Arg::Number(data.offset));
}
"define" => {
let name = match &args[0] {
arg::Arg::Variable(x) | arg::Arg::String(x) => x,
_ => {
return Err(
"First argument of define macro needs to be a literal-like"
.to_string(),
)
}
parser::ast::Node::Call { name, arg } => {
let arg_num = match arg {
Some(x) => x.resolve_number(data).unwrap(),
_ => 0,
};

if data.contants.contains_key(name) {
return Err(format!("Constant already exists: '{name}'"));
}
data.contants.insert(name.to_string(), (&args[1]).clone());
}
"define_override" => {
let name = match &args[0] {
arg::Arg::Variable(x) | arg::Arg::String(x) => x,
_ => {
return Err(
"First argument of define macro needs to be a literal-like"
.to_string(),
)
}
data.program[data.offset as usize] = match name.as_str() {
"nop" => 0x00,
"push" => 0x01,
"pop" => 0x02,
"ts" => 0x03,
"tsr" => 0x04,
"tss" => 0x05,
"tlr" => 0x06,
"tlrc" => 0x07,
"tls" => 0x08,
"ld" => 0x09,
"dbg" => 0x0a,
"alu" => 0x0b,
"get" => 0x0c,
"set" => 0x0d,
_ => return Err(format!("Unknown opcode: '{name}'")),
};

data.contants.insert(name.to_string(), (&args[1]).clone());
}
"org" => {
data.offset = args[0].resolve_number(data).unwrap();
}
"org_add" => {
data.offset += args[0].resolve_number(data).unwrap();
}
"org_sub" => {
data.offset -= args[0].resolve_number(data).unwrap();
}
"bytes" => {
for arg in args {
for n in arg.resolve_bytes(data).unwrap() {
data.program[data.offset as usize] = n;
data.offset += 1;
}
if arg_num == 0 {
data.program[data.offset as usize] |= 0b10000000;
}
}
"bw" => {
let string_arg = args[0].resolve_bytes(data).unwrap();
let string = String::from_utf8(string_arg).unwrap().replace("\\n", "\n");
data.offset += 1;

for n in string.bytes() {
data.program[data.offset as usize] = n;
if arg_num != 0 {
data.program[data.offset as usize] = (arg_num >> 8) as u8;
data.offset += 1;

data.program[data.offset as usize] = arg_num as u8;
data.offset += 1;
}
}
_ => return Err(format!("Unknown macro: '{name}'")),
};
parser::ast::Node::MacroCall { name, args } => {
match name.as_str() {
"debug" => {
for arg in args {
let bytes = arg.resolve_bytes(data).unwrap();

println!("{}", arg.to_code().replace('\n', "\\n"));
println!(" => {}", arg.resolve_number(data).unwrap());
println!(
" => [{}]",
bytes
.iter()
.map(|n| {
let num = radix(*n, 16).to_string();
format!(
"0x{}{}",
"00".chars()
.take(2 - num.len())
.collect::<String>(),
num
)
})
.join(", ")
);
println!(
" => \"{}\"",
String::from_utf8(bytes).unwrap().replace('\n', "\\n")
);
}
println!("==========");
}
// "define" => {
// let name = match &args[0] {
// arg::Arg::Variable(x) | arg::Arg::String(x) => x,
// _ => return Err(
// "First argument of define macro needs to be a literal-like"
// .to_string(),
// ),
// };

// if data.contants.contains_key(name) {
// return Err(format!("Constant already exists: '{name}'"));
// }
// data.contants.insert(name.to_string(), (&args[1]).clone());
// }
"define" => {
let name = match &args[0] {
arg::Arg::Variable(x) | arg::Arg::String(x) => x,
_ => return Err(
"First argument of define macro needs to be a literal-like"
.to_string(),
),
};

data.contants.insert(name.to_string(), (&args[1]).clone());
}
"macro" => {
let name = match &args[0] {
arg::Arg::Variable(x) | arg::Arg::String(x) => x,
_ => return Err(
"First argument of define macro needs to be a literal-like"
.to_string(),
),
};
let args = match (&args[1..])
.into_iter()
.map(|a| match a {
arg::Arg::Variable(x) => Ok(x.clone()),
__ => {
return Err(
"Macro arguments need to be variables".to_string()
)
}
})
.collect::<Result<Vec<_>, _>>()
{
Ok(x) => x,
Err(x) => return Err(x),
};

data.macros.insert(
name.clone(),
Macro {
args,
body: Vec::new(),
},
);
data.state = State::Macro {
name: name.clone(),
depth: 1,
};
}
"macroend" => return Err("Unexpected macro end".to_string()),
"org" => {
data.offset = args[0].resolve_number(data).unwrap();
}
"org_add" => {
data.offset += args[0].resolve_number(data).unwrap();
}
"org_sub" => {
data.offset -= args[0].resolve_number(data).unwrap();
}
"bytes" => {
for arg in args {
for n in arg.resolve_bytes(data).unwrap() {
data.program[data.offset as usize] = n;
data.offset += 1;
}
}
}
"bw" => {
let string_arg = args[0].resolve_bytes(data).unwrap();
let string =
String::from_utf8(string_arg).unwrap().replace("\\n", "\n");

for n in string.bytes() {
data.program[data.offset as usize] = n;
data.offset += 1;
}
}
_ => match data.macros.get(&name) {
Some(m) => {
dbg!(name, m);
}
None => return Err(format!("Unknown macro: '{name}'")),
},
};
}
}
}
State::Macro { name, depth } => match &node {
parser::ast::Node::MacroCall {
name: node_name,
args: _,
} => match node_name.as_str() {
"macro" => {
data.state = State::Macro {
name: name.clone(),
depth: depth + 1,
};
}
"macroend" => {
if *depth - 1 == 0 {
data.state = State::Default;
} else {
data.state = State::Macro {
name: name.clone(),
depth: depth - 1,
};
}
}
_ => {
data.macros.get_mut(name).unwrap().body.push(node.clone());
}
},
_ => {
data.macros.get_mut(name).unwrap().body.push(node.clone());
}
},
}

if data.offset > (32 * 1024) {
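The `State::Macro` handling above is essentially a small recorder: `.macro` switches the assembler into recording mode, a nested `.macro` only bumps a depth counter, `.macroend` pops it, and every other node is appended to the macro's body. A distilled sketch of just that bookkeeping, with plain strings standing in for the parser's `Node` type (names here are illustrative, not the crate's):

use std::collections::HashMap;

// Stand-in for the assembler's state: either emitting normally or
// recording the body of a named macro, tracking nesting depth.
enum State {
    Default,
    Macro { name: String, depth: usize },
}

fn record(lines: &[&str]) -> HashMap<String, Vec<String>> {
    let mut macros: HashMap<String, Vec<String>> = HashMap::new();
    let mut state = State::Default;
    for line in lines {
        state = match state {
            State::Default => match line.strip_prefix(".macro ") {
                Some(name) => {
                    macros.insert(name.to_string(), Vec::new());
                    State::Macro { name: name.to_string(), depth: 1 }
                }
                None => State::Default, // ordinary assembly would be emitted here
            },
            State::Macro { name, depth } => match *line {
                ".macroend" if depth == 1 => State::Default,
                ".macroend" => State::Macro { name, depth: depth - 1 },
                l if l.starts_with(".macro ") => State::Macro { name, depth: depth + 1 },
                _ => {
                    // everything else becomes part of the recorded body
                    macros.get_mut(&name).unwrap().push(line.to_string());
                    State::Macro { name, depth }
                }
            },
        };
    }
    macros
}

fn main() {
    let m = record(&[".macro std_stop", "ts 0xffff", "tlr 0", ".macroend"]);
    assert_eq!(m["std_stop"], vec!["ts 0xffff", "tlr 0"]);
}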
@@ -1,8 +1,7 @@
use itertools::Itertools;
use radix_fmt::radix;
use std::cmp::Ordering;
use std::io;
use std::io::Read;
use std::io::{self, Write};

#[derive(Debug)]
pub struct Data {

@@ -22,6 +21,8 @@ pub struct Data {
stack: [u16; 8 * 1024],
memory: [u16; 16 * 1024],

term: console::Term,
}

impl Data {

@@ -43,6 +44,8 @@ impl Data {
stack: [0; 8 * 1024],
memory: [0; 16 * 1024],

term: console::Term::stdout(),
};
}

@@ -56,34 +59,24 @@ impl Data {
self.stack[(address - (32 * 1024)) as usize]
} else if address < (56 * 1024) {
self.memory[(address - (40 * 1024)) as usize]
} else if address == (56 * 1024 + 2) {
self.term.read_char().unwrap() as u16
} else {
0
}
}

pub fn set_memory(&mut self, address: u16, value: u16) -> u16 {
pub fn set_memory(&mut self, address: u16, value: u16) {
if address >= (32 * 1024) && address < (40 * 1024) {
self.stack[(address - (32 * 1024)) as usize] = value;

value
} else if address < (40 * 1024) {
self.memory[(address - (40 * 1024)) as usize] = value;

value
} else if address == (56 * 1024) {
print!("{}", value);

value & 0xff
} else if address == (56 * 1024) {
print!("{value}");

0
io::stdout().flush().unwrap();
} else if address == (56 * 1024 + 1) {
print!("{}", char::from(value as u8));

0
} else {
0
io::stdout().flush().unwrap();
}
}
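Read together with the CORE_MEM_* defines in examples/forth.asm, the branches above describe the emulator's 16-bit memory map: program bytes below 32K, the stack from 32K to 40K, general memory up to 56K, and three memory-mapped I/O cells at the top. A rough summary as Rust constants (the names are illustrative, not taken from the crate):

// Rough map of the address space implied by get_memory/set_memory above
// and by the CORE_MEM_* defines in examples/forth.asm.
const STACK_START: u16 = 32 * 1024; // below this: program bytes
const MEM_START: u16 = 40 * 1024;   // 32K..40K: data stack
const OUT: u16 = 56 * 1024;         // write: print the value as a number
const CHR: u16 = 56 * 1024 + 1;     // write: print the value as a character
const KEY: u16 = 56 * 1024 + 2;     // read: one key from the terminal

fn region(address: u16) -> &'static str {
    match address {
        a if a < STACK_START => "program",
        a if a < MEM_START => "stack",
        a if a < OUT => "memory",
        a if a == OUT => "numeric output",
        a if a == CHR => "character output",
        a if a == KEY => "keyboard input",
        _ => "unmapped",
    }
}

fn main() {
    assert_eq!(region(41_000), "memory");
    assert_eq!(region(56 * 1024 + 2), "keyboard input");
}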
@@ -203,9 +196,9 @@ impl Data {
}
0x0a => {
self.tmp = match self.reg_a.cmp(&self.reg_b) {
Ordering::Equal => 0,
Ordering::Less => 1,
Ordering::Greater => 2,
Ordering::Equal => 0,
}
}
0x0b => {

@@ -248,8 +241,6 @@ pub fn bool_to_num(x: bool) -> u16 {
}

pub fn emulate(data: &mut Data) -> Result<(), String> {
let mut stdin = io::stdin().lock();

while data.reg_pc != 0xffff {
data.reg_opc = data.get_memory(data.reg_pc) as u8;
data.reg_pc = data.reg_pc.wrapping_add(1);

@@ -288,36 +279,35 @@ pub fn emulate(data: &mut Data) -> Result<(), String> {
data.set_register(data.reg_arg as u8, data.tmp);
}
0x07 => {
if data.reg_a & 1 == 1 {
data.set_register(data.reg_arg as u8, data.tmp);
}
}
0x08 => {
data.stack[data.reg_sp as usize] = data.tmp;
data.reg_sp = data.reg_sp.wrapping_add(1);
}
0x08 => {
0x09 => {
data.reg_sp = data.reg_sp.wrapping_sub(1);
data.set_register(data.reg_arg as u8, data.stack[data.reg_sp as usize]);
data.stack[data.reg_sp as usize] = 0;
}
0x09 => {
data.stack[data.reg_sp as usize] = data.get_register(data.reg_arg as u8);
data.reg_sp = data.reg_sp.wrapping_add(1);
}
0x0a => {
println!(
"[DEBUG]: [{}]",
data.stack.iter().take(data.reg_sp as usize).join(", ")
);
println!("Press enter to continue execution...",);
stdin.read_exact(&mut [0; 1]).unwrap();
print!("Press enter to continue execution...",);
io::stdout().flush().unwrap();
data.term.read_line().unwrap();
}
0x0b => {
data.alu(data.reg_arg as u8).unwrap();
}
0x0c => {
data.alu(data.tmp as u8).unwrap();
}
0x0d => {
0x0c => {
data.tmp = data.get_memory(data.tmp);
}
0x0e => {
0x0d => {
data.set_memory(data.tmp, data.reg_a);
}
_ => return Err(format!("Invalid opcode: 0x{}", radix(data.reg_opc, 16))),
@@ -56,17 +56,17 @@ pub fn lex(source: String) -> Result<Vec<Token>, String> {
while let Some(&ch) = chars.peek() {
match ch {
';' => {
chars.next();
chars.next_if(|c| *c == ';');
// ';' => {
// chars.next();
// chars.next_if(|c| *c == ';');

tokens.push(Token::Comment(
chars.peeking_take_while(|c| *c != '\n').collect::<String>(),
));
}
'@' => {
// tokens.push(Token::Comment(
// chars.peeking_take_while(|c| *c != '\n').collect::<String>(),
// ));
// }
';' | '@' => {
chars.next();
chars.next_if(|c| *c == '@');
chars.next_if(|c| *c == ';' || *c == '@');

tokens.push(Token::Comment(
chars.peeking_take_while(|c| *c != '\n').collect::<String>(),

@@ -83,13 +83,13 @@ pub fn lex(source: String) -> Result<Vec<Token>, String> {
tokens.push(Token::MacroLiteral(format!(
".{}",
chars
.peeking_take_while(|c| c.is_alphabetic() || c.is_numeric())
.peeking_take_while(|c| c.is_alphabetic() || c.is_numeric() || *c == '_')
.collect::<String>()
)));
}
ch if ch.is_alphabetic() => {
let name: String = chars
.peeking_take_while(|c| c.is_alphabetic() || c.is_numeric())
.peeking_take_while(|c| c.is_alphabetic() || c.is_numeric() || *c == '_')
.collect();

tokens.push(Token::Literal(name));

@@ -97,7 +97,7 @@ pub fn lex(source: String) -> Result<Vec<Token>, String> {
ch if ch.is_numeric() => {
tokens.push(Token::Number(
chars
.peeking_take_while(|c| c.is_alphanumeric())
.peeking_take_while(|c| c.is_alphanumeric() || *c == '_')
.collect::<String>(),
));
}
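Both comment markers now share one lexer arm: either `;` or `@` starts a comment, a doubled marker is folded into the same leader, and the rest of the line is collected into a Comment token. A simplified standalone sketch of that scanning rule, written against std only and stripping comments instead of emitting the crate's tokens:

// Standalone sketch of the merged ';' / '@' comment rule above.
fn strip_comments(source: &str) -> String {
    let mut out = String::new();
    let mut chars = source.chars().peekable();
    while let Some(&ch) = chars.peek() {
        if ch == ';' || ch == '@' {
            chars.next();
            // a doubled marker (";;", "@@") is still one comment leader
            chars.next_if(|c| *c == ';' || *c == '@');
            while chars.peek().map_or(false, |c| *c != '\n') {
                chars.next(); // drop the comment body, keep the newline
            }
        } else {
            out.push(ch);
            chars.next();
        }
    }
    out
}

fn main() {
    assert_eq!(strip_comments("nop @ comment\nts 1"), "nop \nts 1");
}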
@@ -5,7 +5,7 @@ use crate::lexer;
pub mod ast;

pub fn parse(tokens: Vec<lexer::Token>) -> Result<ast::AST, &'static str> {
pub fn parse(tokens: Vec<lexer::Token>) -> Result<ast::AST, String> {
let mut iter = tokens.iter().peekable();
let mut body: Vec<ast::Node> = Vec::new();

@@ -37,15 +37,20 @@ pub fn parse(tokens: Vec<lexer::Token>) -> Result<ast::AST, &'static str> {
{
body.push(ast::Node::Label(x.clone()));
} else {
let args = arg::parse_args(
let args = match arg::parse_args(
iter.by_ref()
.take_while(|t| !matches!(t, lexer::Token::Newline(_)))
.filter(|t| !matches!(t, lexer::Token::Whitespace(_)))
.filter(|t| {
!matches!(t, lexer::Token::Whitespace(_) | lexer::Token::Comment(_))
})
.collect(),
)
.unwrap();
) {
Ok(x) => x,
Err(x) => return Err(x.to_string()),
};

if args.len() > 1 {
return Err("Opcode call only accepts one argument");
return Err("Opcode call only accepts one argument".to_string());
}

body.push(ast::Node::Call {

@@ -60,7 +65,7 @@ pub fn parse(tokens: Vec<lexer::Token>) -> Result<ast::AST, &'static str> {
lexer::Token::Whitespace(_) | lexer::Token::Newline(_) => {
iter.next();
}
_ => return Err("Unexpected token"),
_ => return Err("Unexpected token".to_string()),
}
}
@@ -3,7 +3,7 @@ use itertools::Itertools;
use crate::arg;
use crate::assembler;

#[derive(Debug)]
#[derive(Debug, Clone)]
pub enum Node {
Comment(String),
Label(String),

@@ -34,7 +34,7 @@ impl assembler::ToCode for Node {
}
}

#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct AST {
pub body: Vec<Node>,
}
hencelisp/.gitignore (vendored, 1 line changed)

@@ -1 +0,0 @@
/target
@@ -1,19 +0,0 @@
[package]
name = "hencelisp"
version = "0.1.0"
edition = "2021"

[lib]
name = "hencelisp"
path = "src/lib/lib.rs"

[[bin]]
name = "hencelisp"
path = "src/bin/main.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
hence = { path = "../hence" }
clap = { version = "3.2.12", features = ["derive"] }
itertools = "0.10.3"
num-parse = "0.1.2"
@@ -1,59 +0,0 @@
use clap::{Parser, Subcommand};
use hencelisp::*;
use std::fs;

#[derive(Debug, Parser)]
#[clap(author, version, about, long_about = None)]
struct Cli {
#[clap(subcommand)]
commands: Commands,
}

#[derive(Debug, Subcommand)]
enum Commands {
#[clap(about = "Lexes source code and outputs tokens")]
Lex {
#[clap(value_parser)]
src: String,
},
#[clap(about = "Parses source code and outputs AST")]
Parse {
#[clap(value_parser)]
src: String,
},
#[clap(about = "Compiles source code to hence assembly")]
Compile {
#[clap(value_parser)]
src: String,
#[clap(value_parser)]
out: Option<String>,
#[clap(long, action)]
dump: bool,
},
}

fn main() {
let args = Cli::parse();
match args.commands {
Commands::Lex { src } => {
let source = fs::read_to_string(src).unwrap();
let tokens = lexer::lex(source).unwrap();
dbg!(tokens);
}
Commands::Parse { src } => {
let source = fs::read_to_string(src).unwrap();
let tokens = lexer::lex(source).unwrap();
let ast = parser::parse(tokens).unwrap();
dbg!(ast);
}
Commands::Compile { src, out, dump } => {
let source = fs::read_to_string(src).unwrap();
let tokens = lexer::lex(source).unwrap();
let ast = parser::parse(tokens).unwrap();

let mut data = compiler::Data::new();
compiler::compile(ast, &mut data).unwrap();
dbg!(data);
}
}
}
@@ -1,61 +0,0 @@
use hence;

use crate::parser;

#[derive(Debug)]

pub struct Data {
pub body: Vec<hence::parser::ast::Node>,
}

impl Data {
pub fn new() -> Self {
Self {
body: Vec::new(),
}
}
}

// pub fn create_call(list: Vec<parser::ast::Node>) -> Result<Call, &'static str> {
// let mut raw_content = list.iter();
// let content = raw_content.by_ref();

// let name = match content
// .skip_while(|n| matches!(n, parser::ast::Node::Comment(_)))
// .next()
// {
// Some(node) => match node {
// parser::ast::Node::String(x)
// | parser::ast::Node::Symbol(x)
// | parser::ast::Node::Literal(x) => x.clone(),
// _ => return Err("Invalid call name node literal"),
// },
// _ => return Err("Cannot create call from empty list"),
// };
// let args: Vec<_> = content.collect();
// dbg!(args);

// Ok(Call {
// name,
// args: Vec::new(),
// })
// }

pub fn compile(ast: parser::ast::AST, data: &mut Data) -> Result<(), &str> {
let mut iter = ast.body.into_iter().peekable();
while let Some(node) = iter.next() {
match node {
parser::ast::Node::Comment(_) => {}
parser::ast::Node::String(_) => return Err("Unexpected string on top level"),
parser::ast::Node::Number(_) => return Err("Unexpected number on top level"),
parser::ast::Node::Symbol(_) => return Err("Unexpected symbol on top level"),
parser::ast::Node::Literal(_) => return Err("Unexpected literal on top level"),

parser::ast::Node::List(x) => {
dbg!(&x);
}
}
}

Ok(())
}
@@ -1,99 +0,0 @@
use hence;
use itertools::Itertools;

#[derive(Debug)]
pub enum Token {
Comment(String),

Newline(String),
Whitespace(String),

LParen,
RParen,

String(String),
Number(String),
Symbol(String),
Literal(String),
}

impl hence::assembler::ToCode for Token {
fn to_code(&self) -> String {
match self {
Token::Comment(x) => format!(";;{x}"),
Token::Newline(x) | Token::Whitespace(x) => x.clone(),
Token::LParen => "(".to_string(),
Token::RParen => ")".to_string(),
Token::String(x) => format!("\"{x}\""),
Token::Number(x) | Token::Symbol(x) | Token::Literal(x) => x.clone(),
}
}
}

pub fn lex(source: String) -> Result<Vec<Token>, String> {
let mut chars = source.chars().peekable();
let mut tokens: Vec<Token> = Vec::new();

while let Some(&ch) = chars.peek() {
match ch {
';' => {
chars.next();
chars.next_if(|c| *c == ';');

tokens.push(Token::Comment(
chars.peeking_take_while(|c| *c != '\n').collect(),
));
}
'\n' => {
tokens.push(Token::Newline(
chars.peeking_take_while(|c| *c == '\n').collect(),
));
}
_ if ch.is_whitespace() => {
tokens.push(Token::Whitespace(
chars
.peeking_take_while(|c| c.is_whitespace() && *c != '\n')
.collect(),
));
}
'(' => {
tokens.push(Token::LParen);
chars.next();
}
')' => {
tokens.push(Token::RParen);
chars.next();
}
'"' => {
chars.next();
tokens.push(Token::String(
chars.by_ref().take_while(|c| *c != '"').collect(),
));
}
_ if ch.is_numeric() => {
tokens.push(Token::Number(
chars
.peeking_take_while(|c| c.is_alphanumeric())
.collect::<String>(),
));
}
':' => {
chars.next();
tokens.push(Token::Symbol(
chars
.peeking_take_while(|c| !c.is_whitespace() && *c != '(' && *c != ')')
.collect(),
));
}
_ => {
tokens.push(Token::Literal(
chars
.peeking_take_while(|c| !c.is_whitespace() && *c != '(' && *c != ')')
.collect(),
));
}
}
}

Ok(tokens)
}
@@ -1,3 +0,0 @@
pub mod compiler;
pub mod lexer;
pub mod parser;
|
|||
use crate::lexer;
|
||||
|
||||
pub mod ast;
|
||||
|
||||
pub fn parse(tokens: Vec<lexer::Token>) -> Result<ast::AST, &'static str> {
|
||||
let mut iter = tokens.into_iter().peekable();
|
||||
let mut body: Vec<ast::Node> = Vec::new();
|
||||
|
||||
while let Some(token) = iter.peek() {
|
||||
match token {
|
||||
lexer::Token::Comment(x) => {
|
||||
body.push(ast::Node::Comment(x.trim().to_string()));
|
||||
iter.next();
|
||||
}
|
||||
lexer::Token::Newline(_) | lexer::Token::Whitespace(_) => {
|
||||
iter.next();
|
||||
}
|
||||
lexer::Token::LParen => {
|
||||
iter.next();
|
||||
let mut depth: usize = 1;
|
||||
body.push(ast::Node::List(
|
||||
parse(
|
||||
iter.by_ref()
|
||||
.take_while(|t| match t {
|
||||
lexer::Token::LParen => {
|
||||
depth += 1;
|
||||
true
|
||||
}
|
||||
lexer::Token::RParen => {
|
||||
depth -= 1;
|
||||
depth != 0
|
||||
}
|
||||
_ => true,
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
.unwrap()
|
||||
.body,
|
||||
));
|
||||
}
|
||||
lexer::Token::RParen => return Err("Unexpected right parenthesis"),
|
||||
lexer::Token::String(x) => {
|
||||
body.push(ast::Node::String(x.clone()));
|
||||
iter.next();
|
||||
}
|
||||
lexer::Token::Number(x) => {
|
||||
body.push(ast::Node::Number(match num_parse::parse_int(x) {
|
||||
Some(y) => y,
|
||||
_ => return Err("Error parsing number"),
|
||||
}));
|
||||
iter.next();
|
||||
}
|
||||
lexer::Token::Symbol(x) => {
|
||||
body.push(ast::Node::Symbol(x.clone()));
|
||||
iter.next();
|
||||
}
|
||||
lexer::Token::Literal(x) => {
|
||||
body.push(ast::Node::Literal(x.clone()));
|
||||
iter.next();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(ast::AST { body })
|
||||
}
|
|
@ -1,30 +0,0 @@
|
|||
use hence;
|
||||
use itertools::Itertools;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Node {
|
||||
Comment(String),
|
||||
List(Vec<Node>),
|
||||
String(String),
|
||||
Number(i32),
|
||||
Symbol(String),
|
||||
Literal(String),
|
||||
}
|
||||
|
||||
impl hence::assembler::ToCode for Node {
|
||||
fn to_code(&self) -> String {
|
||||
match self {
|
||||
Node::Comment(x) => format!(";; {x}"),
|
||||
Node::List(x) => format!("({})", x.iter().map(|n| n.to_code()).join(" ")),
|
||||
Node::String(x) => format!("\"{x}\""),
|
||||
Node::Number(x) => x.to_string(),
|
||||
Node::Symbol(x) => format!(":{x}"),
|
||||
Node::Literal(x) => x.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct AST {
|
||||
pub body: Vec<Node>,
|
||||
}
|