hence/hence/src/lib/parser.rs

use anyhow::{bail, Result};
use itertools::PeekingNext;

use crate::arg;
use crate::lexer;

pub mod ast;

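/// Parse a flat stream of lexer tokens into an `ast::AST`, producing
/// comment, macro-call, label, and opcode-call nodes.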
pub fn parse(tokens: Vec<lexer::Token>) -> Result<ast::AST> {
    let mut iter = tokens.iter().peekable();
    let mut body: ast::Body = vec![];

    // Walk the token stream, peeking so each arm decides how much to consume.
    while let Some(&token) = iter.peek() {
        match token {
            // Standalone comment: keep its trimmed text as an AST node.
            lexer::Token::Comment(x) => {
                body.push(ast::Node::Comment(x.trim().to_string()));
                iter.next();
            }
            // Macro call: drop the leading macro marker from the name and
            // collect arguments up to the end of the line, skipping whitespace.
            lexer::Token::MacroLiteral(x) => {
                iter.next();
                body.push(ast::Node::MacroCall {
                    name: (&x[1..]).to_string(),
                    args: arg::parse_args(
                        iter.by_ref()
                            .take_while(|t| !matches!(t, lexer::Token::Newline(_)))
                            .filter(|t| !matches!(t, lexer::Token::Whitespace(_)))
                            .collect(),
                    )?,
                });
            }
            // A bare literal is either a label (when a colon follows) or an opcode call.
            lexer::Token::Literal(x) => {
                iter.next();
                if iter
                    .peeking_next(|t| matches!(t, lexer::Token::Colon))
                    .is_some()
                {
                    // `peeking_next` consumed the colon; record the label.
                    body.push(ast::Node::Label(x.clone()));
                } else {
                    // Parse the rest of the line as arguments, ignoring
                    // whitespace and trailing comments.
                    let args = match arg::parse_args(
                        iter.by_ref()
                            .take_while(|t| !matches!(t, lexer::Token::Newline(_)))
                            .filter(|t| {
                                !matches!(t, lexer::Token::Whitespace(_) | lexer::Token::Comment(_))
                            })
                            .collect(),
                    ) {
                        Ok(x) => x,
                        Err(x) => bail!("{}", x),
                    };

                    if args.len() > 1 {
                        bail!("Opcode call only accepts one argument");
                    }

                    body.push(ast::Node::Call {
                        name: x.clone(),
                        arg: args.first().cloned(),
                    });
                }
            }
            // Whitespace and blank lines between statements carry no meaning.
            lexer::Token::Whitespace(_) | lexer::Token::Newline(_) => {
                iter.next();
            }
            _ => bail!("Unexpected token"),
        }
    }

    Ok(ast::AST { body })
}