Update lisp lexer

Dominic Grimm 2022-07-17 22:07:59 +02:00
parent ffad349a41
commit dd23759f85
No known key found for this signature in database
GPG key ID: A6C051C716D2CE65
9 changed files with 110 additions and 24 deletions

Cargo.lock (generated)

@@ -117,6 +117,7 @@ version = "0.1.0"
 dependencies = [
  "clap",
  "hence",
+ "itertools",
 ]
 
 [[package]]


@@ -1,3 +1,4 @@
+;; test module
 (module test
   "Main module"
   (defun main ()


@@ -14,8 +14,8 @@ pub enum Arg {
     },
 }
 
-impl assembler::ToAssembly for Arg {
-    fn to_assembly(&self) -> String {
+impl assembler::ToCode for Arg {
+    fn to_code(&self) -> String {
         match self {
             Arg::String(x) => format!("\"{x}\""),
             Arg::Number(x) => x.to_string(),
@@ -23,9 +23,9 @@ impl assembler::ToAssembly for Arg {
             Arg::BinaryExpression { left, right, op } => {
                 format!(
                     "({left} {op} {right})",
-                    left = left.to_assembly(),
-                    op = op.to_assembly(),
-                    right = right.to_assembly()
+                    left = left.to_code(),
+                    op = op.to_code(),
+                    right = right.to_code()
                 )
             }
         }
@@ -136,8 +136,8 @@ pub enum BinaryExpressionOperator {
     Pow,
 }
 
-impl assembler::ToAssembly for BinaryExpressionOperator {
-    fn to_assembly(&self) -> String {
+impl assembler::ToCode for BinaryExpressionOperator {
+    fn to_code(&self) -> String {
         match self {
             BinaryExpressionOperator::Add => "+".to_string(),
             BinaryExpressionOperator::Sub => "-".to_string(),


@@ -5,8 +5,8 @@ use std::collections::HashMap;
 use crate::arg;
 use crate::parser;
 
-pub trait ToAssembly {
-    fn to_assembly(&self) -> String;
+pub trait ToCode {
+    fn to_code(&self) -> String;
 }
 
 pub trait ByteResolvable {
@@ -91,7 +91,7 @@ pub fn assemble(ast: parser::ast::AST, data: &mut Data) -> Result<(), String> {
     for arg in args {
         let bytes = arg.resolve_bytes(data).unwrap();
-        println!("{}", arg.to_assembly().replace('\n', "\\n"));
+        println!("{}", arg.to_code().replace('\n', "\\n"));
         println!(" => {}", arg.resolve_number(data).unwrap());
         println!(
             " => [{}]",


@@ -27,8 +27,8 @@ pub enum Token {
     Whitespace(String),
 }
 
-impl assembler::ToAssembly for Token {
-    fn to_assembly(&self) -> String {
+impl assembler::ToCode for Token {
+    fn to_code(&self) -> String {
         match self {
             Token::Comment(x) => format!(";{x}"),
             Token::StringLiteral(x) => format!("\"{x}\""),


@@ -11,14 +11,14 @@ pub enum Node {
     MacroCall { name: String, args: Vec<arg::Arg> },
 }
 
-impl assembler::ToAssembly for Node {
-    fn to_assembly(&self) -> String {
+impl assembler::ToCode for Node {
+    fn to_code(&self) -> String {
         match self {
             Node::Comment(x) => format!("; {x}"),
             Node::Label(x) => format!("{x}:"),
             Node::Call { name, arg } => {
                 if let Some(a) = arg {
-                    format!("{name} {arg}", arg = a.to_assembly())
+                    format!("{name} {arg}", arg = a.to_code())
                 } else {
                     name.clone()
                 }
@@ -27,10 +27,7 @@ impl assembler::ToAssembly for Node {
                 if args.is_empty() {
                     format!(".{name}")
                 } else {
-                    format!(
-                        ".{name} {}",
-                        args.iter().map(|a| a.to_assembly()).join(", ")
-                    )
+                    format!(".{name} {}", args.iter().map(|a| a.to_code()).join(", "))
                 }
             }
         }
@@ -42,8 +39,8 @@ pub struct AST {
     pub body: Vec<Node>,
 }
 
-impl assembler::ToAssembly for AST {
-    fn to_assembly(&self) -> String {
-        self.body.iter().map(|n| n.to_assembly()).join("\n")
+impl assembler::ToCode for AST {
+    fn to_code(&self) -> String {
+        self.body.iter().map(|n| n.to_code()).join("\n")
     }
 }
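The rename leaves downstream code depending on nothing but the single `to_code` method, so helpers can stay generic over the trait. A minimal sketch of such a helper, assuming only the `ToCode` definition shown above (`emit` itself is illustrative and not part of hence):

use hence::assembler::ToCode;

// Illustrative helper (not in this diff): render any slice of ToCode values
// as newline-separated output, the same pattern AST::to_code uses.
fn emit<T: ToCode>(items: &[T]) -> String {
    items
        .iter()
        .map(|item| item.to_code())
        .collect::<Vec<_>>()
        .join("\n")
}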


@@ -15,3 +15,4 @@ path = "src/bin/main.rs"
 [dependencies]
 hence = { path = "../hence" }
 clap = { version = "3.2.12", features = ["derive"] }
+itertools = "0.10.3"


@@ -23,7 +23,8 @@ fn main() {
     match args.commands {
         Commands::Lex { src } => {
             let source = fs::read_to_string(src).unwrap();
-            println!("{source}");
+            let tokens = lexer::lex(source).unwrap();
+            dbg!(tokens);
         }
     }
 }


@@ -1 +1,86 @@
-pub fn lex(source: String) {}
+use hence;
+use itertools::Itertools;
+
+#[derive(Debug)]
+pub enum Token {
+    Comment(String),
+    MultiLineComment(String),
+    Newline(String),
+    Whitespace(String),
+    LParen,
+    RParen,
+    StringLiteral(String),
+    Number(String),
+    Literal(String),
+}
+
+impl hence::assembler::ToCode for Token {
+    fn to_code(&self) -> String {
+        match self {
+            Token::Comment(x) => format!(";;{x}"),
+            Token::MultiLineComment(x) => format!("#|{x}|#"),
+            Token::Newline(x) | Token::Whitespace(x) => x.clone(),
+            Token::LParen => "(".to_string(),
+            Token::RParen => ")".to_string(),
+            Token::StringLiteral(x) => format!("\"{x}\""),
+            Token::Number(x) | Token::Literal(x) => x.clone(),
+        }
+    }
+}
+
+pub fn lex(source: String) -> Result<Vec<Token>, String> {
+    let mut chars = source.chars().peekable();
+    let mut tokens: Vec<Token> = Vec::new();
+
+    while let Some(&ch) = chars.peek() {
+        match ch {
+            ';' => {
+                chars.next();
+                chars.next_if(|c| *c == ';');
+                tokens.push(Token::Comment(
+                    chars.peeking_take_while(|c| *c != '\n').collect(),
+                ));
+            }
+            '\n' => {
+                tokens.push(Token::Newline(
+                    chars.peeking_take_while(|c| *c == '\n').collect(),
+                ));
+            }
+            _ if ch.is_whitespace() => {
+                tokens.push(Token::Whitespace(
+                    chars
+                        .peeking_take_while(|c| c.is_whitespace() && *c != '\n')
+                        .collect(),
+                ));
+            }
+            '(' => {
+                tokens.push(Token::LParen);
+                chars.next();
+            }
+            ')' => {
+                tokens.push(Token::RParen);
+                chars.next();
+            }
+            '"' => {
+                chars.next();
+                tokens.push(Token::StringLiteral(
+                    chars.by_ref().take_while(|c| *c != '"').collect(),
+                ));
+            }
+            _ => {
+                dbg!(ch);
+                tokens.push(Token::Literal(
+                    chars
+                        .peeking_take_while(|c| !c.is_whitespace() && *c != '(' && *c != ')')
+                        .collect(),
+                ));
+            }
+        }
+    }
+
+    Ok(tokens)
+}
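As a quick sanity check of the new lexer (a sketch, not part of this commit): comments, whitespace, and newlines are all kept as tokens, and `to_code` prints each one back out, so re-emitting the token stream should reproduce the input. A test along these lines could sit at the bottom of the lexer module; the module layout and test name are assumptions:

#[cfg(test)]
mod tests {
    use super::*;
    use hence::assembler::ToCode;

    #[test]
    fn lexing_round_trips() {
        // First lines of the sample Lisp module from this commit (indentation assumed).
        let source = ";; test module\n(module test\n  \"Main module\"\n  (defun main ()\n".to_string();
        let tokens = lex(source.clone()).unwrap();

        // Comment tokens drop the leading ";;" and string literals drop their
        // quotes, but to_code adds both back, so concatenating the token
        // stream reproduces the original text.
        let rebuilt: String = tokens.iter().map(|t| t.to_code()).collect();
        assert_eq!(rebuilt, source);
    }
}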