Rewrite assembler with macro support

This commit is contained in:
Dominic Grimm 2022-08-28 12:31:56 +02:00
parent e82c5bdb90
commit 6adb943754
No known key found for this signature in database
GPG key ID: D0214A581220BCA5
9 changed files with 435 additions and 264 deletions

Cargo.lock (generated): 82 additions

@@ -2,6 +2,30 @@
# It is not intended for manual editing.
version = 3
[[package]]
name = "addr2line"
version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
dependencies = [
"gimli",
]
[[package]]
name = "adler"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "anyhow"
version = "1.0.62"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1485d4d2cc45e7b201ee3767015c96faa5904387c9d87c6efdd0fb511f12d305"
dependencies = [
"backtrace",
]
[[package]]
name = "atty"
version = "0.2.14"
@@ -19,12 +43,33 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "backtrace"
version = "0.3.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"
dependencies = [
"addr2line",
"cc",
"cfg-if",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
]
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "cc"
version = "1.0.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
[[package]]
name = "cfg-if"
version = "1.0.0"
@@ -107,6 +152,12 @@ dependencies = [
"wasi",
]
[[package]]
name = "gimli"
version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d"
[[package]]
name = "hashbrown"
version = "0.12.3"
@@ -123,6 +174,7 @@ checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
name = "hence"
version = "0.1.0"
dependencies = [
"anyhow",
"clap", "clap",
"console", "console",
"itertools", "itertools",
@ -166,6 +218,21 @@ version = "0.2.132"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8371e4e5341c3a96db127eb2465ac681ced4c433e01dd0e938adbef26ba93ba5" checksum = "8371e4e5341c3a96db127eb2465ac681ced4c433e01dd0e938adbef26ba93ba5"
[[package]]
name = "memchr"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "miniz_oxide"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc"
dependencies = [
"adler",
]
[[package]]
name = "num"
version = "0.4.0"
@@ -251,6 +318,15 @@ dependencies = [
"autocfg",
]
[[package]]
name = "object"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
dependencies = [
"memchr",
]
[[package]]
name = "once_cell"
version = "1.13.1"
@@ -353,6 +429,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5e9af64574935e39f24d1c0313a997c8b880ca0e087c888bc6af8af31579847"
[[package]]
name = "rustc-demangle"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
[[package]]
name = "strsim"
version = "0.10.0"


@ -21,3 +21,5 @@ rhexdump = "0.1.1"
radix_fmt = "1" radix_fmt = "1"
rand = "0.8.5" rand = "0.8.5"
console = "0.15.1" console = "0.15.1"
anyhow = { version = "1.0.62", features = ["backtrace"] }

Binary file not shown.


@@ -65,9 +65,10 @@ fn main() {
Some(x) => x.to_string(),
_ => panic!("Could not get directory in which source code resides"),
},
-ast.body,
+ast,
);
-assembler::assemble(&mut data).unwrap();
+// assembler::assemble(&mut data).unwrap();
+data.assemble().unwrap();
if let Some(x) = bin {
File::create(x).unwrap().write_all(&data.program).unwrap();
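For orientation: the driver now hands the whole AST to assembler::Data::new and calls the new assemble method instead of the old free assembler::assemble function. A minimal sketch of that flow, assuming the surrounding CLI and lexing code is unchanged; the drive helper, its parameters, and the lexer/parser entry points named here are illustrative, not taken from this commit:

    use std::fs::File;
    use std::io::Write;

    use crate::{assembler, lexer, parser};

    // Hypothetical helper showing the new call shape; `tokens`, `dir`, and `bin`
    // are assumed to come from the existing CLI/lexer code.
    fn drive(tokens: Vec<lexer::Token>, dir: String, bin: Option<String>) -> anyhow::Result<()> {
        let ast = parser::parse(tokens)?; // now returns anyhow::Result<ast::AST>
        let mut data = assembler::Data::new(dir, ast); // takes the whole AST, not ast.body
        data.assemble()?; // method call replaces assembler::assemble(&mut data)

        if let Some(path) = bin {
            // data.program is still the raw program buffer written to disk
            File::create(path)?.write_all(&data.program)?;
        }
        Ok(())
    }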


@@ -1,3 +1,5 @@
+use anyhow::{bail, Result};
use crate::arg;
use crate::assembler;
use crate::lexer;
@@ -14,8 +16,8 @@ pub enum Arg {
},
}
-impl assembler::ToCode for Arg {
-fn to_code(&self) -> String {
+impl assembler::ToAssembly for Arg {
+fn to_assembly(&self) -> String {
match self {
Arg::String(x) => format!("\"{x}\""),
Arg::Number(x) => x.to_string(),
@@ -23,9 +25,9 @@ impl assembler::ToCode for Arg {
Arg::BinaryExpression { left, right, op } => {
format!(
"({left} {op} {right})",
-left = left.to_code(),
-op = op.to_code(),
-right = right.to_code()
+left = left.to_assembly(),
+op = op.to_assembly(),
+right = right.to_assembly()
)
}
}
@@ -52,7 +54,7 @@ impl assembler::ByteResolvable<assembler::Data> for Arg {
let mut arg: Option<arg::Arg>;
loop {
-arg = data.contants.get(&name).cloned();
+arg = data.constants.get(&name).cloned();
match arg {
Some(a) => {
@@ -92,7 +94,7 @@ impl assembler::ByteResolvable<assembler::Data> for Arg {
let mut arg: Option<arg::Arg>;
loop {
-arg = data.contants.get(&name).cloned();
+arg = data.constants.get(&name).cloned();
match arg {
Some(a) => {
@@ -130,8 +132,8 @@ pub enum BinaryExpressionOperator {
Pow,
}
-impl assembler::ToCode for BinaryExpressionOperator {
-fn to_code(&self) -> String {
+impl assembler::ToAssembly for BinaryExpressionOperator {
+fn to_assembly(&self) -> String {
match self {
BinaryExpressionOperator::Add => "+".to_string(),
BinaryExpressionOperator::Sub => "-".to_string(),
@@ -142,20 +144,20 @@ impl assembler::ToCode for BinaryExpressionOperator {
}
}
-pub fn parse_binary_operation(token: &lexer::Token) -> Result<BinaryExpressionOperator, String> {
+pub fn parse_binary_operation(token: &lexer::Token) -> Result<BinaryExpressionOperator> {
match token {
lexer::Token::Add => Ok(BinaryExpressionOperator::Add),
lexer::Token::Sub => Ok(BinaryExpressionOperator::Sub),
lexer::Token::Mul => Ok(BinaryExpressionOperator::Mul),
lexer::Token::Div => Ok(BinaryExpressionOperator::Div),
lexer::Token::Pow => Ok(BinaryExpressionOperator::Pow),
-_ => Err("Invalid binary expression operator".to_string()),
+_ => bail!("Invalid binary expression operator"),
}
}
-pub fn parse_binary_expression_arg(tokens: &mut Vec<&&lexer::Token>) -> Result<Arg, String> {
+pub fn parse_binary_expression_arg(tokens: &mut Vec<&&lexer::Token>) -> Result<Arg> {
if tokens.is_empty() {
-return Err("Malformed binary expression".to_string());
+bail!("Malformed binary expression");
}
let mut args: Vec<&&lexer::Token> = tokens.drain(..3).collect();
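The recurring change in this file (and in the parser below) is the switch from Result<T, String> to anyhow::Result<T>, with errors raised through bail!. A small self-contained illustration of that pattern, using a toy parse_op function rather than the crate's own code:

    use anyhow::{bail, Result};

    // Before this commit the equivalent signature would have been
    // `fn parse_op(tok: &str) -> Result<char, String>` with explicit Err(...) values.
    fn parse_op(tok: &str) -> Result<char> {
        match tok {
            "+" => Ok('+'),
            "-" => Ok('-'),
            _ => bail!("Invalid binary expression operator"),
        }
    }

    fn main() -> Result<()> {
        assert_eq!(parse_op("+")?, '+');
        assert!(parse_op("?").is_err());
        Ok(())
    }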


@@ -1,12 +1,281 @@
-use itertools::Itertools;
-use radix_fmt::radix;
+// use itertools::Itertools;
+// use radix_fmt::radix;
// use std::collections::HashMap;
//
// use crate::arg;
// use crate::parser;
//
// pub trait ToCode {
// fn to_code(&self) -> String;
// }
//
// pub trait ByteResolvable<T> {
// fn resolve_number(&self, data: &mut T) -> Result<u16, String>;
//
// fn resolve_bytes(&self, data: &mut T) -> Result<Vec<u8>, String>;
// }
//
// #[derive(Debug)]
// pub struct Macro {
// pub args: Vec<String>,
// pub body: Vec<parser::ast::Node>,
// }
//
// #[derive(Debug)]
// pub enum State {
// Default,
// Macro { name: String, depth: usize },
// }
//
// #[derive(Debug)]
// pub struct Data {
// pub dir: String,
// pub body: Vec<parser::ast::Node>,
// pub program: [u8; 32 * 1024],
// pub offset: u16,
// pub contants: HashMap<String, arg::Arg>,
// pub macros: HashMap<String, Macro>,
// pub state: State,
// }
//
// impl Data {
// pub fn new(dir: String, body: Vec<parser::ast::Node>) -> Self {
// Self {
// dir,
// body,
// program: [0; 32 * 1024],
// offset: 0,
// contants: HashMap::new(),
// macros: HashMap::new(),
// state: State::Default,
// }
// }
// }
//
// pub fn assemble(data: &mut Data) -> Result<(), String> {
// for node in data.body.clone() {
// data.contants
// .insert("OFFSET".to_string(), arg::Arg::Number(data.offset));
//
// match &data.state {
// State::Default => {
// match node {
// parser::ast::Node::Comment(_) => {}
// parser::ast::Node::Label(x) => {
// if data.contants.contains_key(&x) {
// return Err(format!("Label already exists: '{x}'"));
// }
//
// data.contants
// .insert(x.to_string(), arg::Arg::Number(data.offset));
// }
// parser::ast::Node::Call { name, arg } => {
// let arg_num = match arg {
// Some(x) => x.resolve_number(data).unwrap(),
// _ => 0,
// };
//
// data.program[data.offset as usize] = match name.as_str() {
// "nop" => 0x00,
// "push" => 0x01,
// "pop" => 0x02,
// "ts" => 0x03,
// "tsr" => 0x04,
// "tss" => 0x05,
// "tlr" => 0x06,
// "tlrc" => 0x07,
// "tls" => 0x08,
// "ld" => 0x09,
// "dbg" => 0x0a,
// "alu" => 0x0b,
// "get" => 0x0c,
// "set" => 0x0d,
// _ => return Err(format!("Unknown opcode: '{name}'")),
// };
// if arg_num == 0 {
// data.program[data.offset as usize] |= 0b10000000;
// }
// data.offset += 1;
//
// if arg_num != 0 {
// data.program[data.offset as usize] = (arg_num >> 8) as u8;
// data.offset += 1;
//
// data.program[data.offset as usize] = arg_num as u8;
// data.offset += 1;
// }
// }
// parser::ast::Node::MacroCall { name, args } => {
// match name.as_str() {
// "debug" => {
// for arg in args {
// let bytes = arg.resolve_bytes(data).unwrap();
//
// println!("{}", arg.to_code().replace('\n', "\\n"));
// println!(" => {}", arg.resolve_number(data).unwrap());
// println!(
// " => [{}]",
// bytes
// .iter()
// .map(|n| {
// let num = radix(*n, 16).to_string();
// format!(
// "0x{}{}",
// "00".chars()
// .take(2 - num.len())
// .collect::<String>(),
// num
// )
// })
// .join(", ")
// );
// println!(
// " => \"{}\"",
// String::from_utf8(bytes).unwrap().replace('\n', "\\n")
// );
// }
// println!("==========");
// }
// "define" => {
// let name = match &args[0] {
// arg::Arg::Variable(x) | arg::Arg::String(x) => x,
// _ => return Err(
// "First argument of define macro needs to be a literal-like"
// .to_string(),
// ),
// };
//
// data.contants.insert(name.to_string(), (&args[1]).clone());
// }
// "macro" => {
// let name = match &args[0] {
// arg::Arg::Variable(x) | arg::Arg::String(x) => x,
// _ => return Err(
// "First argument of define macro needs to be a literal-like"
// .to_string(),
// ),
// };
// let args = match (&args[1..])
// .iter()
// // .map(|a| match a {
// // arg::Arg::Variable(x) => Ok(x.clone()),
// // __ => {
// // Err("Macro arguments need to be variables".to_string())
// // }
// // })
// .map(|a| {
// if let arg::Arg::Variable(x) = a {
// Ok(x.clone())
// } else {
// Err("Macro arguments need to be variables".to_string())
// }
// })
// .collect::<Result<Vec<_>, _>>()
// {
// Ok(x) => x,
// Err(x) => return Err(x),
// };
//
// data.macros.insert(
// name.clone(),
// Macro {
// args,
// body: Vec::new(),
// },
// );
// data.state = State::Macro {
// name: name.clone(),
// depth: 1,
// };
// }
// "macroend" => return Err("Unexpected macro end".to_string()),
// "org" => {
// data.offset = args[0].resolve_number(data).unwrap();
// }
// "org_add" => {
// data.offset += args[0].resolve_number(data).unwrap();
// }
// "org_sub" => {
// data.offset -= args[0].resolve_number(data).unwrap();
// }
// "bytes" => {
// for arg in args {
// for n in arg.resolve_bytes(data).unwrap() {
// data.program[data.offset as usize] = n;
// data.offset += 1;
// }
// }
// }
// "bw" => {
// let string_arg = args[0].resolve_bytes(data).unwrap();
// let string =
// String::from_utf8(string_arg).unwrap().replace("\\n", "\n");
//
// for n in string.bytes() {
// data.program[data.offset as usize] = n;
// data.offset += 1;
// }
// }
// _ => match data.macros.get(&name) {
// Some(m) => {
// dbg!(name, m);
// }
// None => return Err(format!("Unknown macro: '{name}'")),
// },
// };
// }
// }
// }
// State::Macro { name, depth } => match &node {
// parser::ast::Node::MacroCall {
// name: node_name,
// args: _,
// } => match node_name.as_str() {
// "macro" => {
// data.state = State::Macro {
// name: name.clone(),
// depth: depth + 1,
// };
// }
// "macroend" => {
// if *depth - 1 == 0 {
// data.state = State::Default;
// } else {
// data.state = State::Macro {
// name: name.clone(),
// depth: depth - 1,
// };
// }
// }
// _ => {
// data.macros.get_mut(name).unwrap().body.push(node.clone());
// }
// },
// _ => {
// data.macros.get_mut(name).unwrap().body.push(node.clone());
// }
// },
// }
//
// if data.offset > (32 * 1024) {
// return Err(format!(
// "Offset out of bounds: 0x{} > 0x8000",
// radix(data.offset, 16),
// ));
// }
// }
//
// Ok(())
// }
+use anyhow::Result;
use std::collections::HashMap;
use crate::arg;
use crate::parser;
-pub trait ToCode {
-fn to_code(&self) -> String;
+pub trait ToAssembly {
+fn to_assembly(&self) -> String;
}
pub trait ByteResolvable<T> {
@@ -16,254 +285,70 @@ pub trait ByteResolvable<T> {
}
#[derive(Debug)]
-pub struct Macro {
-pub args: Vec<String>,
-pub body: Vec<parser::ast::Node>,
+pub enum ResolveResult {
+Resolved(Vec<u8>),
+Partial(u16),
+Unresolved,
}
-#[derive(Debug)]
-pub enum State {
-Default,
-Macro { name: String, depth: usize },
+impl ResolveResult {
+pub fn empty() -> Self {
+ResolveResult::Resolved(vec![])
+}
}
-#[derive(Debug)]
+pub const PROGRAM_SIZE: u16 = 32 * 1024;
+pub type Program = [u8; PROGRAM_SIZE as usize];
pub struct Data {
pub dir: String,
-pub body: Vec<parser::ast::Node>,
-pub program: [u8; 32 * 1024],
+pub ast: parser::ast::AST,
+pub program: Program,
pub offset: u16,
-pub contants: HashMap<String, arg::Arg>,
-pub macros: HashMap<String, Macro>,
-pub state: State,
+pub body_stack: Vec<parser::ast::Node>,
+pub constants: HashMap<String, arg::Arg>,
}
impl Data {
-pub fn new(dir: String, body: Vec<parser::ast::Node>) -> Self {
+pub fn new(dir: String, ast: parser::ast::AST) -> Self {
Self {
dir,
-body,
-program: [0; 32 * 1024],
+body_stack: ast.body.iter().rev().cloned().collect(),
+ast,
+program: [0; PROGRAM_SIZE as usize],
offset: 0,
-contants: HashMap::new(),
-macros: HashMap::new(),
-state: State::Default,
+constants: HashMap::new(),
}
}
}
-pub fn assemble(data: &mut Data) -> Result<(), String> {
+pub fn resolve_node(&mut self, node: parser::ast::Node) -> ResolveResult {
for node in data.body.clone() {
data.contants
.insert("OFFSET".to_string(), arg::Arg::Number(data.offset));
match &data.state {
State::Default => {
match node {
-parser::ast::Node::Comment(_) => {}
+parser::ast::Node::Comment(_) => ResolveResult::empty(),
parser::ast::Node::Label(x) => {
-if data.contants.contains_key(&x) {
+self.constants.insert(x, arg::Arg::Number(self.offset));
-return Err(format!("Label already exists: '{x}'"));
-}
-data.contants
+ResolveResult::empty()
-.insert(x.to_string(), arg::Arg::Number(data.offset));
}
parser::ast::Node::Call { name, arg } => {
-let arg_num = match arg {
+dbg!(name, arg);
-Some(x) => x.resolve_number(data).unwrap(),
-_ => 0,
-};
-data.program[data.offset as usize] = match name.as_str() {
+ResolveResult::empty()
"nop" => 0x00,
"push" => 0x01,
"pop" => 0x02,
"ts" => 0x03,
"tsr" => 0x04,
"tss" => 0x05,
"tlr" => 0x06,
"tlrc" => 0x07,
"tls" => 0x08,
"ld" => 0x09,
"dbg" => 0x0a,
"alu" => 0x0b,
"get" => 0x0c,
"set" => 0x0d,
_ => return Err(format!("Unknown opcode: '{name}'")),
};
if arg_num == 0 {
data.program[data.offset as usize] |= 0b10000000;
}
data.offset += 1;
if arg_num != 0 {
data.program[data.offset as usize] = (arg_num >> 8) as u8;
data.offset += 1;
data.program[data.offset as usize] = arg_num as u8;
data.offset += 1;
}
} }
parser::ast::Node::MacroCall { name, args } => {
-match name.as_str() {
+dbg!(name, args);
-"debug" => {
-for arg in args {
-let bytes = arg.resolve_bytes(data).unwrap();
-println!("{}", arg.to_code().replace('\n', "\\n"));
+ResolveResult::empty()
println!(" => {}", arg.resolve_number(data).unwrap());
println!(
" => [{}]",
bytes
.iter()
.map(|n| {
let num = radix(*n, 16).to_string();
format!(
"0x{}{}",
"00".chars()
.take(2 - num.len())
.collect::<String>(),
num
)
})
.join(", ")
);
println!(
" => \"{}\"",
String::from_utf8(bytes).unwrap().replace('\n', "\\n")
);
}
println!("==========");
}
"define" => {
let name = match &args[0] {
arg::Arg::Variable(x) | arg::Arg::String(x) => x,
_ => return Err(
"First argument of define macro needs to be a literal-like"
.to_string(),
),
};
data.contants.insert(name.to_string(), (&args[1]).clone());
}
"macro" => {
let name = match &args[0] {
arg::Arg::Variable(x) | arg::Arg::String(x) => x,
_ => return Err(
"First argument of define macro needs to be a literal-like"
.to_string(),
),
};
let args = match (&args[1..])
.iter()
// .map(|a| match a {
// arg::Arg::Variable(x) => Ok(x.clone()),
// __ => {
// Err("Macro arguments need to be variables".to_string())
// }
// })
.map(|a| {
if let arg::Arg::Variable(x) = a {
Ok(x.clone())
} else {
Err("Macro arguments need to be variables".to_string())
}
})
.collect::<Result<Vec<_>, _>>()
{
Ok(x) => x,
Err(x) => return Err(x),
};
data.macros.insert(
name.clone(),
Macro {
args,
body: Vec::new(),
},
);
data.state = State::Macro {
name: name.clone(),
depth: 1,
};
}
"macroend" => return Err("Unexpected macro end".to_string()),
"org" => {
data.offset = args[0].resolve_number(data).unwrap();
}
"org_add" => {
data.offset += args[0].resolve_number(data).unwrap();
}
"org_sub" => {
data.offset -= args[0].resolve_number(data).unwrap();
}
"bytes" => {
for arg in args {
for n in arg.resolve_bytes(data).unwrap() {
data.program[data.offset as usize] = n;
data.offset += 1;
}
}
}
"bw" => {
let string_arg = args[0].resolve_bytes(data).unwrap();
let string =
String::from_utf8(string_arg).unwrap().replace("\\n", "\n");
for n in string.bytes() {
data.program[data.offset as usize] = n;
data.offset += 1;
}
}
_ => match data.macros.get(&name) {
Some(m) => {
dbg!(name, m);
}
None => return Err(format!("Unknown macro: '{name}'")),
},
};
}
}
}
State::Macro { name, depth } => match &node {
parser::ast::Node::MacroCall {
name: node_name,
args: _,
} => match node_name.as_str() {
"macro" => {
data.state = State::Macro {
name: name.clone(),
depth: depth + 1,
};
}
"macroend" => {
if *depth - 1 == 0 {
data.state = State::Default;
} else {
data.state = State::Macro {
name: name.clone(),
depth: depth - 1,
};
}
}
_ => {
data.macros.get_mut(name).unwrap().body.push(node.clone());
}
},
_ => {
data.macros.get_mut(name).unwrap().body.push(node.clone());
}
},
}
if data.offset > (32 * 1024) {
return Err(format!(
"Offset out of bounds: 0x{} > 0x8000",
radix(data.offset, 16),
));
}
}
-Ok(())
+pub fn assemble(&mut self) -> Result<Program> {
+while let Some(node) = self.body_stack.pop() {
+let res = self.resolve_node(node);
+dbg!(res);
}
+Ok(self.program)
}
}


@@ -27,8 +27,8 @@ pub enum Token {
Whitespace(String),
}
-impl assembler::ToCode for Token {
-fn to_code(&self) -> String {
+impl assembler::ToAssembly for Token {
+fn to_assembly(&self) -> String {
match self {
Token::Comment(x) => format!(";{x}"),
Token::StringLiteral(x) => format!("\"{x}\""),


@@ -1,3 +1,4 @@
+use anyhow::{bail, Result};
use itertools::PeekingNext;
use crate::arg;
@@ -5,7 +6,7 @@ use crate::lexer;
pub mod ast;
-pub fn parse(tokens: Vec<lexer::Token>) -> Result<ast::AST, String> {
+pub fn parse(tokens: Vec<lexer::Token>) -> Result<ast::AST> {
let mut iter = tokens.iter().peekable();
let mut body: Vec<ast::Node> = Vec::new();
@@ -46,19 +47,15 @@ pub fn parse(tokens: Vec<lexer::Token>) -> Result<ast::AST, String> {
.collect(),
) {
Ok(x) => x,
-Err(x) => return Err(x.to_string()),
+Err(x) => bail!("{}", x),
};
if args.len() > 1 {
-return Err("Opcode call only accepts one argument".to_string());
+bail!("Opcode call only accepts one argument");
}
body.push(ast::Node::Call {
name: x.clone(),
-// arg: match args.first() {
-// Some(x) => Some(x.clone()),
-// _ => None,
-// },
arg: args.first().cloned(),
});
}
@@ -66,7 +63,7 @@ pub fn parse(tokens: Vec<lexer::Token>) -> Result<ast::AST, String> {
lexer::Token::Whitespace(_) | lexer::Token::Newline(_) => {
iter.next();
}
-_ => return Err("Unexpected token".to_string()),
+_ => bail!("Unexpected token"),
}
}


@@ -11,14 +11,14 @@ pub enum Node {
MacroCall { name: String, args: Vec<arg::Arg> },
}
-impl assembler::ToCode for Node {
-fn to_code(&self) -> String {
+impl assembler::ToAssembly for Node {
+fn to_assembly(&self) -> String {
match self {
Node::Comment(x) => format!("; {x}"),
Node::Label(x) => format!("{x}:"),
Node::Call { name, arg } => {
if let Some(a) = arg {
-format!("{name} {arg}", arg = a.to_code())
+format!("{name} {arg}", arg = a.to_assembly())
} else {
name.clone()
}
@@ -27,20 +27,22 @@ impl assembler::ToCode for Node {
if args.is_empty() {
format!(".{name}")
} else {
-format!(".{name} {}", args.iter().map(|a| a.to_code()).join(", "))
+format!(".{name} {}", args.iter().map(|a| a.to_assembly()).join(", "))
}
}
}
}
}
+pub type Body = Vec<Node>;
#[derive(Debug, Clone)]
pub struct AST {
pub body: Vec<Node>,
}
-impl assembler::ToCode for AST {
-fn to_code(&self) -> String {
-self.body.iter().map(|n| n.to_code()).join("\n")
+impl assembler::ToAssembly for AST {
+fn to_assembly(&self) -> String {
+self.body.iter().map(|n| n.to_assembly()).join("\n")
}
}
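The ToCode to ToAssembly rename runs through every syntax type, but the trait keeps the same single-method, source-rendering shape. A minimal self-contained illustration of that shape, using toy types rather than the crate's own parser::ast::Node, with rendering rules copied from the diff above:

    // Toy re-creation of the ToAssembly pattern from this commit.
    trait ToAssembly {
        fn to_assembly(&self) -> String;
    }

    enum Node {
        Comment(String),
        Label(String),
        Call { name: String, arg: Option<u16> },
    }

    impl ToAssembly for Node {
        fn to_assembly(&self) -> String {
            match self {
                Node::Comment(x) => format!("; {x}"),
                Node::Label(x) => format!("{x}:"),
                Node::Call { name, arg } => match arg {
                    Some(a) => format!("{name} {a}"),
                    None => name.clone(),
                },
            }
        }
    }

    fn main() {
        let nodes = vec![
            Node::Label("start".into()),
            Node::Call { name: "push".into(), arg: Some(42) },
            Node::Comment("done".into()),
        ];
        for n in &nodes {
            println!("{}", n.to_assembly());
        }
    }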