Dominic Grimm 2023-03-08 19:06:28 +01:00
parent 0bcf2a1ef5
commit fe6086b45c
25 changed files with 87 additions and 2695 deletions

Cargo.lock (generated)

@@ -157,15 +157,6 @@ dependencies = [
  "typenum",
 ]
 
-[[package]]
-name = "dependency-graph"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5143247629540606d0888beae9ca0e0b9a81a32151bfecd0b2be4a961155c24d"
-dependencies = [
- "petgraph",
-]
-
 [[package]]
 name = "digest"
 version = "0.10.5"
@@ -188,12 +179,6 @@ version = "0.3.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
 
-[[package]]
-name = "fixedbitset"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
-
 [[package]]
 name = "generic-array"
 version = "0.14.6"
@@ -249,21 +234,6 @@ dependencies = [
  "unescape",
 ]
 
-[[package]]
-name = "henceforth"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "clap",
- "dependency-graph",
- "hence",
- "indexmap",
- "itertools",
- "lazy_static",
- "parse_int",
- "petgraph",
-]
-
 [[package]]
 name = "hermit-abi"
 version = "0.1.19"
@@ -358,16 +328,6 @@ dependencies = [
  "num-traits",
 ]
 
-[[package]]
-name = "petgraph"
-version = "0.6.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6d5014253a1331579ce62aa67443b4a658c5e7dd03d4bc6d302b94474888143"
-dependencies = [
- "fixedbitset",
- "indexmap",
-]
-
 [[package]]
 name = "ppv-lite86"
 version = "0.2.16"


@@ -1,2 +1,2 @@
 [workspace]
-members = ["hence", "henceforth"]
+members = ["hence"]


@@ -5,14 +5,6 @@ edition = "2021"
 authors = ["Dominic Grimm <dominic@dergrimm.net>"]
 repository = "https://git.dergrimm.net/dergrimm/hence.git"
 
-[lib]
-name = "hence"
-path = "src/lib/lib.rs"
-
-[[bin]]
-name = "hence"
-path = "src/bin/main.rs"
-
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]


@@ -1,201 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<project source="3.7.2" version="1.0">
This file is intended to be loaded by Logisim-evolution v3.7.2(https://github.com/logisim-evolution/).
<lib desc="#Wiring" name="0">
<tool name="Pin">
<a name="appearance" val="classic"/>
</tool>
</lib>
<lib desc="#Gates" name="1"/>
<lib desc="#Plexers" name="2"/>
<lib desc="#Arithmetic" name="3"/>
<lib desc="#Memory" name="4"/>
<lib desc="#I/O" name="5"/>
<lib desc="#TTL" name="6"/>
<lib desc="#TCL" name="7"/>
<lib desc="#Base" name="8"/>
<lib desc="#BFH-Praktika" name="9"/>
<lib desc="#Input/Output-Extra" name="10"/>
<lib desc="#Soc" name="11"/>
<main name="main"/>
<options>
<a name="gateUndefined" val="ignore"/>
<a name="simlimit" val="1000"/>
<a name="simrand" val="0"/>
</options>
<mappings>
<tool lib="8" map="Button2" name="Menu Tool"/>
<tool lib="8" map="Button3" name="Menu Tool"/>
<tool lib="8" map="Ctrl Button1" name="Menu Tool"/>
</mappings>
<toolbar>
<tool lib="8" name="Poke Tool"/>
<tool lib="8" name="Edit Tool"/>
<tool lib="8" name="Wiring Tool"/>
<tool lib="8" name="Text Tool"/>
<sep/>
<tool lib="0" name="Pin"/>
<tool lib="0" name="Pin">
<a name="facing" val="west"/>
<a name="output" val="true"/>
</tool>
<sep/>
<tool lib="1" name="NOT Gate"/>
<tool lib="1" name="AND Gate"/>
<tool lib="1" name="OR Gate"/>
<tool lib="1" name="XOR Gate"/>
<tool lib="1" name="NAND Gate"/>
<tool lib="1" name="NOR Gate"/>
<sep/>
<tool lib="4" name="D Flip-Flop"/>
<tool lib="4" name="Register"/>
</toolbar>
<circuit name="main">
<a name="appearance" val="logisim_evolution"/>
<a name="circuit" val="main"/>
<a name="circuitnamedboxfixedsize" val="true"/>
<a name="simulationFrequency" val="1.0"/>
<comp lib="0" loc="(180,160)" name="Clock">
<a name="label" val="clk"/>
</comp>
<comp lib="0" loc="(380,210)" name="Constant">
<a name="value" val="0xffff"/>
<a name="width" val="16"/>
</comp>
<comp lib="0" loc="(90,320)" name="Pin">
<a name="appearance" val="classic"/>
<a name="label" val="rst"/>
</comp>
<comp lib="1" loc="(470,220)" name="NOT Gate"/>
<comp lib="1" loc="(540,180)" name="AND Gate"/>
<comp lib="3" loc="(430,220)" name="Comparator">
<a name="mode" val="unsigned"/>
<a name="width" val="16"/>
</comp>
<comp lib="4" loc="(260,200)" name="Register">
<a name="appearance" val="logisim_evolution"/>
<a name="label" val="reg_pc"/>
<a name="showInTab" val="true"/>
<a name="width" val="16"/>
</comp>
<comp loc="(920,350)" name="memory"/>
<wire from="(180,160)" to="(490,160)"/>
<wire from="(290,290)" to="(290,320)"/>
<wire from="(320,230)" to="(390,230)"/>
<wire from="(380,210)" to="(390,210)"/>
<wire from="(430,220)" to="(440,220)"/>
<wire from="(470,220)" to="(480,220)"/>
<wire from="(480,200)" to="(480,220)"/>
<wire from="(480,200)" to="(490,200)"/>
<wire from="(90,320)" to="(290,320)"/>
</circuit>
<circuit name="memory">
<a name="appearance" val="logisim_evolution"/>
<a name="circuit" val="memory"/>
<a name="circuitnamedboxfixedsize" val="true"/>
<a name="simulationFrequency" val="1.0"/>
<comp lib="0" loc="(260,220)" name="Pin">
<a name="appearance" val="NewPins"/>
<a name="label" val="set"/>
</comp>
<comp lib="0" loc="(260,250)" name="Pin">
<a name="appearance" val="NewPins"/>
<a name="label" val="clk"/>
</comp>
<comp lib="0" loc="(280,160)" name="Pin">
<a name="appearance" val="NewPins"/>
<a name="label" val="address"/>
<a name="radix" val="16"/>
<a name="width" val="16"/>
</comp>
<comp lib="0" loc="(280,190)" name="Pin">
<a name="appearance" val="NewPins"/>
<a name="label" val="value"/>
<a name="radix" val="16"/>
<a name="width" val="16"/>
</comp>
<comp lib="0" loc="(460,420)" name="Constant">
<a name="value" val="0x8000"/>
<a name="width" val="16"/>
</comp>
<comp lib="0" loc="(460,510)" name="Constant">
<a name="value" val="0x4000"/>
<a name="width" val="16"/>
</comp>
<comp lib="0" loc="(830,210)" name="Bit Extender">
<a name="type" val="zero"/>
</comp>
<comp lib="0" loc="(970,230)" name="Pin">
<a name="appearance" val="NewPins"/>
<a name="facing" val="west"/>
<a name="label" val="data"/>
<a name="output" val="true"/>
<a name="radix" val="16"/>
<a name="width" val="16"/>
</comp>
<comp lib="1" loc="(570,530)" name="AND Gate"/>
<comp lib="1" loc="(960,230)" name="OR Gate">
<a name="width" val="16"/>
</comp>
<comp lib="3" loc="(510,410)" name="Subtractor">
<a name="width" val="16"/>
</comp>
<comp lib="3" loc="(510,500)" name="Comparator">
<a name="mode" val="unsigned"/>
<a name="width" val="16"/>
</comp>
<comp lib="4" loc="(540,150)" name="ROM">
<a name="addrWidth" val="16"/>
<a name="appearance" val="logisim_evolution"/>
<a name="contents">addr/data: 16 8
3 7f fc 86 1 0 1 1
0 2 85 82 6 0 5 85
6 0 6 4 0 5 88 4
0 6 88 89 3 ff ff 86
32732*0 3 0 4 86
</a>
<a name="label" val="program"/>
<a name="labelvisible" val="true"/>
</comp>
<comp lib="4" loc="(640,400)" name="RAM">
<a name="addrWidth" val="16"/>
<a name="appearance" val="logisim_evolution"/>
<a name="dataWidth" val="16"/>
<a name="enables" val="line"/>
</comp>
<wire from="(260,220)" to="(400,220)"/>
<wire from="(260,250)" to="(390,250)"/>
<wire from="(280,160)" to="(440,160)"/>
<wire from="(280,190)" to="(340,190)"/>
<wire from="(340,190)" to="(340,580)"/>
<wire from="(340,580)" to="(620,580)"/>
<wire from="(390,250)" to="(390,550)"/>
<wire from="(390,550)" to="(520,550)"/>
<wire from="(400,220)" to="(400,450)"/>
<wire from="(400,450)" to="(640,450)"/>
<wire from="(440,160)" to="(440,400)"/>
<wire from="(440,160)" to="(540,160)"/>
<wire from="(440,400)" to="(470,400)"/>
<wire from="(440,460)" to="(440,490)"/>
<wire from="(440,460)" to="(530,460)"/>
<wire from="(440,490)" to="(470,490)"/>
<wire from="(460,420)" to="(470,420)"/>
<wire from="(460,510)" to="(470,510)"/>
<wire from="(510,410)" to="(530,410)"/>
<wire from="(510,510)" to="(520,510)"/>
<wire from="(530,410)" to="(530,460)"/>
<wire from="(530,410)" to="(640,410)"/>
<wire from="(570,530)" to="(590,530)"/>
<wire from="(590,470)" to="(590,530)"/>
<wire from="(590,470)" to="(640,470)"/>
<wire from="(620,490)" to="(620,580)"/>
<wire from="(620,490)" to="(640,490)"/>
<wire from="(780,210)" to="(790,210)"/>
<wire from="(830,210)" to="(910,210)"/>
<wire from="(880,490)" to="(890,490)"/>
<wire from="(890,250)" to="(890,490)"/>
<wire from="(890,250)" to="(910,250)"/>
<wire from="(960,230)" to="(970,230)"/>
</circuit>
</project>


@@ -1,6 +1,5 @@
 ; hence core lib
 
-core:
 .define NULL, 0x0000
 .define VOID, NULL
@@ -12,7 +11,6 @@ core:
 .define CORE_KB, 1024
 
-core_mem:
 .define CORE_MEM_PRG, (0 * CORE_KB)
 .define CORE_MEM_PRG_END, (32 * CORE_KB)
 .define CORE_MEM_ST, CORE_MEM_PRG_END
@@ -22,7 +20,6 @@ core:
 .define CORE_MEM_CHR, (CORE_MEM_MEM_END + 1)
 .define CORE_MEM_KEY, (CORE_MEM_MEM_END + 2)
 
-core_reg:
 .define CORE_REG_PC, 0x0
 .define CORE_REG_OPC, 0x1
 .define CORE_REG_ARG, 0x2
@@ -33,7 +30,6 @@ core:
 .define CORE_REG_C, 0x7
 .define CORE_REG_D, 0x8
 
-core_alu:
 .define CORE_ALU_NOT, 0x00
 .define CORE_ALU_AND, 0x01
 .define CORE_ALU_OR, 0x02


@@ -2,66 +2,65 @@
 .requires "$lib/core.asm"
 
-std:
 .macro std_tclr
 ts NULL
 .endmacro
 
 .macro std_rclr, lib_std_rclr_arg_0_reg
 ts NULL
 tlr lib_std_rclr_arg_0_reg
 .endmacro
 
 .macro std_alu, lib_std_alu_arg_0_op
 ts lib_std_alu_arg_0_op
 alu
 .endmacro
 
 .macro std_get, lib_std_get_arg_0_addr
 ts lib_std_get_arg_0_addr
 get
 .endmacro
 
 .macro std_set, lib_std_set_arg_0_addr
 ts lib_std_set_arg_0_addr
 set
 .endmacro
 
 .macro std_cp, lib_std_cp_arg_0_from, lib_std_cp_arg_1_to
 tsr lib_std_cp_arg_0_from
 tlr lib_std_cp_arg_1_to
 .endmacro
 
 .macro std_mv, lib_std_mv_arg_0_from, lib_std_cp_arg_1_to
 tsr lib_std_cp_arg_0_from
 tlr lib_std_cp_arg_1_to
 .std_rclr lib_std_cp_arg_1_to
 .endmacro
 
 .macro std_rset, lib_std_init_arg_0_reg, lib_std_init_arg_1_val
 ts lib_std_init_arg_1_val
 tlr lib_std_init_arg_0_reg
 .endmacro
 
 .macro std_jump, lib_std_jump_arg_0_label
 .std_rset CORE_REG_PC, lib_std_jump_arg_0_label
 .endmacro
 
 .macro std_cond_jump, lib_std_cond_jump_arg_0_label
 ts lib_std_cond_jump_arg_0_label
 tlrc CORE_REG_PC
 .endmacro
 
 .macro std_stop
 .std_rset CORE_REG_PC, 0xffff
 .endmacro
 
 .macro std_inc, lib_std_inc_arg_0_reg
 .std_rset lib_std_inc_arg_0_reg, 1
 .std_alu CORE_ALU_ADD
 .endmacro
 
 .macro std_ld
 tss
 pop
 .endmacro


@@ -1,2 +0,0 @@
examples/*.asm
examples/*.bin


@@ -1,28 +0,0 @@
[package]
name = "henceforth"
version = "0.1.0"
edition = "2021"
authors = ["Dominic Grimm <dominic@dergrimm.net>"]
repository = "https://git.dergrimm.net/dergrimm/hence.git"
[lib]
name = "henceforth"
path = "src/lib/lib.rs"
[[bin]]
name = "henceforth"
path = "src/bin/main.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
hence = { path = "../hence" }
clap = { version = "3.2.16", features = ["derive"] }
anyhow = { version = "1.0.62", features = ["backtrace"] }
itertools = "0.10.2"
parse_int = "0.6.0"
indexmap = "1.9.1"
lazy_static = "1.4.0"
dependency-graph = "0.1.5"
petgraph = "0.6.2"


@@ -1,6 +0,0 @@
: test1 40 ;
: test2 test1 2 ;
1 1 = debug
if 69 else 42 then debug


@@ -1,83 +0,0 @@
use anyhow::Result;
use clap::{Parser, Subcommand};
use hence::assembler::ToCode;
use std::fs;
use henceforth::*;
#[derive(Debug, Parser)]
#[clap(author, version, about, long_about = None)]
struct Cli {
#[clap(subcommand)]
commands: Commands,
}
#[derive(Debug, Subcommand)]
enum Commands {
#[clap(about = "Lexes source code and outputs tokens")]
Lex {
#[clap(value_parser)]
src: String,
},
#[clap(about = "Parses source code and outputs AST")]
Parse {
#[clap(value_parser)]
src: String,
},
#[clap(about = "Compiles assembly from source code")]
Compile {
#[clap(value_parser)]
src: String,
#[clap(value_parser)]
out: Option<String>,
#[clap(short, long, action)]
optimize: Option<bool>,
#[clap(long, action)]
dump: bool,
},
}
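// Pipeline: lex the source, parse it into an AST, and for `compile` lower it to a
// hence assembly AST; the assembly text is written to `out` when given and printed
// to stdout when --dump is passed.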
fn main() -> Result<()> {
let args = Cli::parse();
match args.commands {
Commands::Lex { src } => {
let source = fs::read_to_string(src)?;
let tokens = lexer::lex(&source)?;
dbg!(tokens);
Ok(())
}
Commands::Parse { src } => {
let source = fs::read_to_string(src)?;
let tokens = lexer::lex(&source)?;
let body = parser::parse(tokens)?;
dbg!(body);
Ok(())
}
Commands::Compile {
src,
out,
optimize,
dump,
} => {
let source = fs::read_to_string(&src)?;
let tokens = lexer::lex(&source)?;
let ast = parser::parse(tokens)?;
let ast = compiler::compile(ast, optimize.unwrap_or(true))?;
let assembly = ast.to_code();
if let Some(x) = out {
fs::write(x, &assembly)?;
}
if dump {
println!("{}", assembly);
}
Ok(())
}
}
}
#[cfg(test)]
mod tests {}


@@ -1,297 +0,0 @@
use anyhow::{bail, Context, Result};
use indexmap::IndexSet;
use itertools::Itertools;
use lazy_static::lazy_static;
use petgraph::{graph::NodeIndex, Graph};
use std::collections::HashMap;
use crate::parser;
mod instruction;
pub use crate::compiler::instruction::Instruction;
pub const TEMPLATE_ASM: &str = include_str!("compiler/templates/default.asm");
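// The bundled assembly template is lexed and parsed lazily on first use and shared by
// every compilation; `Data::embed` starts from a copy of it and appends the interned
// strings and the compiled program body.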
lazy_static! {
#[derive(Debug)]
pub static ref TEMPLATE: hence::parser::ast::Body = hence::parser::parse(
hence::lexer::lex(TEMPLATE_ASM).unwrap()
)
.unwrap()
.body;
}
pub trait Compilable<T, U> {
fn compile(&self, data: &T) -> Result<U>;
}
#[derive(Debug)]
pub struct Word {
pub id: usize,
pub instructions: Vec<Instruction>,
pub times_used: usize,
pub callable_graph_node: NodeIndex,
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct Condition {
pub if_instructions: Vec<Instruction>,
pub else_instructions: Vec<Instruction>,
pub callable_graph_node: NodeIndex,
}
#[derive(Debug)]
pub enum CallableId {
Word(String),
Condition(usize),
}
#[derive(Debug)]
pub struct Data {
pub strings: IndexSet<String>,
pub callable_graph: Graph<CallableId, ()>,
pub words: HashMap<String, Word>,
pub conditions: Vec<Condition>,
}
impl Data {
pub fn default() -> Self {
Self {
// words: HashMap::new(),
// conditions: IndexSet::new(),
strings: IndexSet::new(),
callable_graph: Graph::new(),
words: HashMap::new(),
conditions: vec![],
}
}
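// Records an edge in the callable graph from `origin` to whatever word or condition
// the instruction refers to, recursing through `Multiple` wrappers; plain stack/ALU
// instructions add no edges.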
pub fn add_graph_edge(&mut self, origin: NodeIndex, instruction: Instruction) -> Result<()> {
match instruction {
Instruction::Call(x) => {
self.callable_graph.add_edge(
origin,
self.words
.get(&x)
.context(format!("Could not get already resolved referenced word: {}", x))?
.callable_graph_node,
(),
);
}
Instruction::Condition(x) => {
self.callable_graph.add_edge(
origin,
self.conditions
.get(x)
.context(format!("Could not get already resolved referenced condition: {}", x))?
.callable_graph_node,
(),
);
}
Instruction::Multiple {
instruction,
count: _,
} => {
self.add_graph_edge(origin, *instruction)?;
}
_ => {}
}
Ok(())
}
pub fn add_graph_edges(&mut self, origin: NodeIndex, ins: Vec<Instruction>) -> Result<()> {
for instruction in ins {
self.add_graph_edge(origin, instruction)?;
}
Ok(())
}
pub fn generate_instructions(
&mut self,
body: parser::ast::Body,
optimize: bool,
) -> Result<Vec<Instruction>> {
let mut instructions: Vec<Instruction> = vec![];
let mut iter = body.into_iter().peekable();
while let Some(node) = iter.next() {
match node {
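// With optimization on, a run of identical nodes is collapsed into one
// Instruction::Multiple { instruction, count }; the "+ 2" accounts for the node
// taken by next() and the one consumed by next_if_eq().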
_ if optimize && iter.next_if_eq(&node).is_some() => {
let count = iter.by_ref().peeking_take_while(|n| *n == node).count() + 2;
instructions.push(Instruction::Multiple {
instruction: Box::new(
self.generate_instructions(vec![node], optimize)?
.into_iter()
.next()
.unwrap(),
),
count,
});
}
parser::ast::Node::Comment(_) => {}
parser::ast::Node::String { mode, string } => {
instructions.push(match mode.as_str() {
"." => {
let id = self.strings.insert_full(string).0;
Instruction::StringPrint(id)
}
"r" => {
let id = self.strings.insert_full(string).0;
Instruction::StringReference(id)
}
"asm" => Instruction::AsmQuote(string),
_ => bail!("Unknown string mode: {}", mode),
});
}
parser::ast::Node::Number(x) => {
instructions.push(instruction::Instruction::Push(x));
}
parser::ast::Node::WordDefinition {
name,
stack: _,
body,
} => {
if Instruction::from_word(&name).is_some() {
bail!("Word already exists as compiler instruction: {}", name);
} else if self.words.contains_key(&name) {
bail!("Word already exists as user word definition: {}", name);
}
let origin = self
.callable_graph
.add_node(CallableId::Word(name.to_string()));
self.words.insert(
name.to_string(),
Word {
id: self.words.len(),
instructions: vec![],
times_used: 0,
callable_graph_node: origin,
},
);
let ins = self.generate_instructions(body, optimize)?;
self.words
.get_mut(&name)
.context(format!("Could not get word: {}", name))?
.instructions = ins.clone();
self.add_graph_edges(origin, ins)?;
}
parser::ast::Node::Condition { if_body, else_body } => {
let if_instructions = self.generate_instructions(if_body, optimize)?;
let else_instructions = self.generate_instructions(else_body, optimize)?;
let id = self.conditions.len();
let origin = self.callable_graph.add_node(CallableId::Condition(id));
self.conditions.push(Condition {
if_instructions: if_instructions.clone(),
else_instructions: else_instructions.clone(),
callable_graph_node: origin,
});
instructions.push(Instruction::Condition(id));
self.add_graph_edges(origin, if_instructions)?;
self.add_graph_edges(origin, else_instructions)?;
dbg!(&self);
}
parser::ast::Node::Word(x) => {
if let Some(ins) = Instruction::from_word(&x) {
instructions.push(ins);
} else if let Some(w) = self.words.get_mut(&x) {
w.times_used += 1;
instructions.push(Instruction::Call(x));
} else {
bail!("Word does not exist: {}", x);
}
}
}
}
Ok(instructions)
}
pub fn embed(&self, body: hence::parser::ast::Body) -> Result<hence::parser::ast::Body> {
let mut x = TEMPLATE.to_vec();
// strings
for (id, s) in self.strings.iter().enumerate() {
x.extend([
hence::parser::ast::Node::Label(format!("data_strings_{}", id)),
hence::parser::ast::Node::MacroCall {
name: "bytes".to_string(),
args: vec![hence::arg::Arg::String(s.to_string())],
},
hence::parser::ast::Node::Label(format!("data_strings_end_{}", id)),
]);
}
// conditions
// for (id, c) in self.conditions.iter().enumerate() {
// x.push(hence::parser::ast::Node::Label(format!(
// "conditions_if_{}",
// id
// )));
// x.extend(c.if_instructions.iter().map(|ins| ins.compile(self)).collect::<Result<Vec<_>>>()?.into_iter().flatten());
// x.push(hence::parser::ast::Node::Label(format!("conditions_else_{}", id)));
// x.extend(c.else_instructions.iter().map(|ins| ins.compile(self)).collect::<Result<Vec<_>>>()?.into_iter().flatten());
// }
// words
// for (name, word) in &self
// .words
// .iter()
// .filter(|(_, w)| w.times_used > 1)
// .sorted_by(|a, b| Ord::cmp(&a.1.id, &b.1.id))
// .collect::<Vec<_>>()
// {
// x.extend(vec![
// hence::parser::ast::Node::Label(format!("words_{}", word.id)),
// hence::parser::ast::Node::Comment(format!("word: \"{}\"", name)),
// ]);
// x.extend(
// word.instructions
// .iter()
// .map(|ins| ins.compile(self))
// .collect::<Result<Vec<hence::parser::ast::Body>>>()
// .unwrap()
// .into_iter()
// .flatten()
// );
// x.push(hence::parser::ast::Node::MacroCall {
// name: "return_call_stack_jump".to_string(),
// args: vec![],
// });
// }
x.extend([
hence::parser::ast::Node::Label("main".to_string()),
hence::parser::ast::Node::MacroCall {
name: "main".to_string(),
args: vec![hence::arg::Arg::Variable("main".to_string())],
},
]);
x.extend(body);
x.push(hence::parser::ast::Node::MacroCall {
name: "std_stop".to_string(),
args: vec![],
});
Ok(x)
}
}
pub fn compile(ast: parser::ast::AST, optimize: bool) -> Result<hence::parser::ast::AST> {
let mut data = Data::default();
let instructions = data.generate_instructions(ast.body, optimize)?;
Ok(hence::parser::ast::AST {
body: data.embed(
instructions
.iter()
.map(|ins| ins.compile(&data))
.collect::<Result<Vec<hence::parser::ast::Body>>>()
.unwrap()
.into_iter()
.flatten()
.collect(),
)?,
})
}

File diff suppressed because it is too large.


@@ -1,63 +0,0 @@
.include "$lib/core.asm"
.include "$lib/std.asm"
.include "$lib/main.asm"
.define MEM_LOOP_I, CORE_MEM_MEM
.define MEM_LOOP_J, (MEM_LOOP_I + 1)
.define MEM_CALL_STACK_PTR, (MEM_LOOP_J + 1)
.define MEM_ALLOC_PTR, (MEM_CALL_STACK_PTR + 16)
.macro stack_transfer_alu
.std_ld
tlr CORE_REG_B
.std_ld
tlr CORE_REG_A
.endmacro
.macro call_stack_jump, call_stack_jump_arg_0_label, call_stack_jump_arg_1_offset
.std_rset CORE_REG_C, call_stack_jump_arg_0_label
.std_rset CORE_REG_D, (call_stack_jump_arg_1_offset + 7)
ts call_stack_jump
tlr CORE_REG_PC
.endmacro
.macro return_call_stack_jump
.std_jump return_call_stack_jump
.endmacro
.std_rset CORE_REG_A, MEM_CALL_STACK_PTR
.std_set MEM_CALL_STACK_PTR
.std_rset CORE_REG_A, (MEM_ALLOC_PTR + 1)
.std_set MEM_ALLOC_PTR
.jump_main
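; Shared call/return helpers (as used by the generated word calls): the target label
; is expected in CORE_REG_C and the return address in CORE_REG_D; call_stack_jump
; pushes the return address onto the call stack at MEM_CALL_STACK_PTR before jumping,
; and return_call_stack_jump pops it back into the program counter.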
call_stack_jump:
.std_get MEM_CALL_STACK_PTR
tlr CORE_REG_A
.std_rset CORE_REG_B, 1
.std_alu CORE_ALU_ADD
tlr CORE_REG_A
tlr CORE_REG_B
.std_set MEM_CALL_STACK_PTR
tsr CORE_REG_D
tlr CORE_REG_A
tsr CORE_REG_B
set
tsr CORE_REG_C
tlr CORE_REG_PC
return_call_stack_jump:
.std_get MEM_CALL_STACK_PTR
tlr CORE_REG_A
tlr CORE_REG_C
.std_rset CORE_REG_B, 1
.std_alu CORE_ALU_SUB
tlr CORE_REG_A
.std_set MEM_CALL_STACK_PTR
tsr CORE_REG_C
get
tlr CORE_REG_PC


@@ -1,84 +0,0 @@
use anyhow::Result;
use hence::assembler::ToCode;
use itertools::Itertools;
#[derive(Debug)]
pub enum Token {
Newline(usize),
Whitespace(usize),
ParenComment(String),
BackslashComment(String),
DoubleDashComment(String),
StringLiteral { mode: String, string: String },
Number(String),
Word(String),
}
impl ToCode for Token {
fn to_code(&self) -> String {
match self {
Token::Newline(x) => ["\n"].into_iter().cycle().take(*x).join(""),
Token::Whitespace(x) => [" "].into_iter().cycle().take(*x).join(""),
Token::ParenComment(x) => format!("( {})", x),
Token::BackslashComment(x) => format!("\\{}", x),
Token::DoubleDashComment(x) => format!("-- {}", x),
Token::StringLiteral { mode, string } => format!("{}\" {}\"", mode, string),
Token::Number(x) | Token::Word(x) => x.clone(),
}
}
}
pub fn is_space(c: char) -> bool {
c.is_whitespace() || c == '\n'
}
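// Single-pass lexer: groups runs of newlines and other whitespace, recognizes the
// three comment forms ("( ... )", "\ ...", "-- ..."), string literals whose leading
// word ends in a double quote (the prefix becomes the mode, e.g. "."), numbers
// (including negative ones), and plain words.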
pub fn lex(source: &str) -> Result<Vec<Token>> {
let mut chars = source.chars().peekable();
let mut tokens: Vec<Token> = vec![];
while let Some(c) = chars.peek() {
tokens.push(match c {
'\n' => Token::Newline(chars.peeking_take_while(|&c| c == '\n').count()),
_ if c.is_whitespace() => {
Token::Whitespace(chars.peeking_take_while(|&c| c.is_whitespace()).count())
}
'\\' => Token::BackslashComment(chars.peeking_take_while(|&c| c != '\n').collect()),
_ if c.is_numeric() => {
Token::Number(chars.peeking_take_while(|&c| !is_space(c)).collect())
}
_ => {
let x: String = chars.peeking_take_while(|&c| !is_space(c)).collect();
let mut iter = x.chars();
match x.as_str() {
"(" => Token::ParenComment(
chars.by_ref().skip(1).take_while(|&c| c != ')').collect(),
),
"--" => Token::DoubleDashComment(
chars.by_ref().take_while(|&c| c != '\n').collect(),
),
_ if x.ends_with('"') => Token::StringLiteral {
mode: x.chars().take(x.len() - 1).collect(),
string: chars.by_ref().skip(1).take_while(|&c| c != '"').collect(),
},
_ if iter.next() == Some('-') => {
if let Some(c) = iter.next() {
if c.is_numeric() {
Token::Number(x)
} else {
Token::Word(x)
}
} else {
Token::Word(x)
}
}
_ => Token::Word(x),
}
}
});
}
Ok(tokens)
}


@@ -1,3 +0,0 @@
pub mod compiler;
pub mod lexer;
pub mod parser;


@@ -1,168 +0,0 @@
use anyhow::{bail, Result};
use parse_int;
use crate::lexer;
pub mod ast;
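// Stack-effect comments: parse_stack_state turns one side of "a b -- c" into a list
// of names, and parse_stack_result splits the comment on "--" into its before/after
// halves.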
pub fn parse_stack_state(s: Option<&str>) -> Vec<String> {
match s {
Some(x) if !x.trim().is_empty() => {
x.split_whitespace().map(|x| x.trim().to_string()).collect()
}
_ => vec![],
}
}
pub fn parse_stack_result(s: &str) -> ast::StackResult {
let mut splitter = s.splitn(2, "--");
ast::StackResult {
before: parse_stack_state(splitter.next()),
after: parse_stack_state(splitter.next()),
}
}
pub fn parse(tokens: Vec<lexer::Token>) -> Result<ast::AST> {
let mut iter = tokens.into_iter().peekable();
let mut body: ast::Body = vec![];
while let Some(token) = iter.next() {
match token {
lexer::Token::Newline(_) | lexer::Token::Whitespace(_) => {}
lexer::Token::ParenComment(x)
| lexer::Token::BackslashComment(x)
| lexer::Token::DoubleDashComment(x) => {
body.push(ast::Node::Comment(x.trim().to_string()));
}
lexer::Token::StringLiteral { mode, string } => {
body.push(ast::Node::String { mode, string });
}
lexer::Token::Number(x) => body.push(ast::Node::Number(parse_int::parse(&x)?)),
lexer::Token::Word(x) => match x.as_str() {
":" => {
let mut depth: usize = 1;
let mut content = iter
.by_ref()
.take_while(|t| match t {
lexer::Token::Word(x) => match x.as_str() {
":" => {
depth += 1;
true
}
";" => {
depth -= 1;
depth != 0
}
_ => true,
},
_ => true,
})
.collect::<Vec<_>>()
.into_iter()
.peekable();
if depth != 0 {
bail!("Unbalanced word definitions");
}
let name = match content.find(|t| {
!matches!(t, lexer::Token::Newline(_) | lexer::Token::Whitespace(_))
}) {
Some(t) => match t {
lexer::Token::Word(x) => x.clone(),
_ => bail!("Word definition name must be a word itself: {:?}", t),
},
None => bail!("Word definition can not be empty"),
};
let stack = loop {
if let Some(t) = content.peek() {
match t {
lexer::Token::Newline(_) | lexer::Token::Whitespace(_) => {
content.next();
}
lexer::Token::ParenComment(x)
| lexer::Token::BackslashComment(x)
| lexer::Token::DoubleDashComment(x) => {
let y = &x.to_string();
content.next();
break Some(parse_stack_result(y));
}
_ => break None,
}
} else {
break None;
}
};
body.push(ast::Node::WordDefinition {
name,
stack,
body: parse(content.collect())?.body,
});
}
"if" => {
let mut depth: usize = 1;
let mut else_used = false;
let if_toks: Vec<_> = iter
.by_ref()
.take_while(|t| match t {
lexer::Token::Word(x) => match x.as_str() {
"if" => {
depth += 1;
true
}
"else" => {
if depth == 1 {
else_used = true;
false
} else {
true
}
}
"then" => {
depth -= 1;
depth != 0
}
_ => true,
},
_ => true,
})
.collect();
let else_toks: Vec<_> = if else_used {
iter.by_ref()
.take_while(|t| match t {
lexer::Token::Word(x) => match x.as_str() {
"if" => {
depth += 1;
true
}
"then" => {
depth -= 1;
depth != 0
}
_ => true,
},
_ => true,
})
.collect()
} else {
vec![]
};
if depth != 0 {
bail!("Unbalanced conditions");
}
body.push(ast::Node::Condition {
if_body: parse(if_toks)?.body,
else_body: parse(else_toks)?.body,
});
}
_ => {
body.push(ast::Node::Word(x));
}
},
}
}
Ok(ast::AST { body })
}


@@ -1,90 +0,0 @@
use hence::assembler::ToCode;
use itertools::Itertools;
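// Stack-effect annotation attached to a word definition: the values expected on the
// stack before the word runs and those left afterwards, i.e. the two sides of an
// "a b -- c" comment.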
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct StackResult {
pub before: Vec<String>,
pub after: Vec<String>,
}
impl ToCode for StackResult {
fn to_code(&self) -> String {
format!(
"{}--{}",
if self.before.is_empty() {
"".to_string()
} else {
format!("{} ", self.before.join(" "))
},
if self.after.is_empty() {
"".to_string()
} else {
format!("{} ", self.after.join(" "))
}
)
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum Node {
Comment(String),
String {
mode: String,
string: String,
},
Number(i32),
WordDefinition {
name: String,
stack: Option<StackResult>,
body: Body,
},
Condition {
if_body: Body,
else_body: Body,
},
Word(String),
}
impl ToCode for Node {
fn to_code(&self) -> String {
match self {
Node::Comment(x) => format!("\\ {}", x),
Node::String { mode, string } => format!("{}\" {}\"", mode, string),
Node::Number(x) => x.to_string(),
Node::WordDefinition { name, stack, body } => format!(
": {}{} {} ;",
name,
match stack {
Some(x) => format!(" {}", x.to_code()),
None => "".to_string(),
},
body.iter().map(|x| x.to_code()).join(" ")
),
Node::Condition { if_body, else_body } => {
if else_body.is_empty() {
format!("if {} then", if_body.iter().map(|x| x.to_code()).join(" "))
} else {
format!(
"if {} else {} then",
if_body.iter().map(|x| x.to_code()).join(" "),
else_body.iter().map(|x| x.to_code()).join(" ")
)
}
}
Node::Word(x) => x.clone(),
}
}
}
pub type Body = Vec<Node>;
#[derive(Debug)]
pub struct AST {
pub body: Body,
}
impl ToCode for AST {
fn to_code(&self) -> String {
self.body.iter().map(|x| x.to_code()).join(" ")
}
}