Compare commits

..

No commits in common. "97c940ba65de19edd8437662d11619c56162bd43" and "6bfb3763d998e3aad30eb8eafb71fc296b874657" have entirely different histories.

4 changed files with 78 additions and 7 deletions

View file

@ -1,5 +0,0 @@
use std::collections::HashMap;
/// Stub — presumably intended to parse a label out of a single source
/// `line`, with `line_i` as its line number (for diagnostics?) — TODO confirm;
/// the body was never implemented and nothing visible here calls it.
/// NOTE(review): both parameters are currently unused, so this compiles with
/// unused-variable warnings.
fn parse_label(line: String, line_i: i64) {
}

View file

@ -39,11 +39,24 @@ pub enum BinOp {
    Minus,
}
/// A node in the expression tree built by `parse_math`.
#[derive(Debug, Clone)]
pub enum ValueNode {
    /// A constant value (currently only integers — see `Literal`).
    Literal(Literal),
    /// A binary operation over two sub-expressions.
    Operator(Operator),
    /// A reference to a named variable.
    Variable(String),
}
#[derive(Debug, Clone)]
pub enum Literal {
    Int(i64),
}
/// A binary arithmetic operation. Operands are boxed because the tree is
/// recursive (`ValueNode` contains `Operator`, which contains `ValueNode`).
#[derive(Debug, Clone)]
pub enum Operator {
    /// Addition: left + right.
    Add(Box<ValueNode>, Box<ValueNode>),
    /// Subtraction: left - right.
    Sub(Box<ValueNode>, Box<ValueNode>),
}
#[derive(Debug, Clone)]
pub struct AssemblerState {
    pub functions: HashMap<String, Function>,
@ -454,7 +467,7 @@ C-OUT RAM-IN 0xFF ; Store address after jump in ram
}
impl Node for Unary {
    fn to_assembly(&self, state: &mut AssemblerState) -> String {
        todo!()
    }
}

View file

@ -1,6 +1,5 @@
mod ast;
mod parse;
mod assembler;
mod tokeniser;
fn main() {

View file

@ -1,7 +1,71 @@
use std::collections::{HashMap, HashSet};
use crate::ast::*;
use crate::tokeniser::Keyword;
use crate::tokeniser::Token;
/// Parses a flat token stream into an expression tree for `+`/`-` arithmetic.
///
/// The stream is split at the *rightmost* operator of the loosest-binding
/// precedence level present, and both sides are parsed recursively, so
/// left-associativity falls out naturally: `a - b + c` → `Add(Sub(a, b), c)`.
///
/// Takes `&[Token]` so recursive calls can slice without cloning; existing
/// `parse_math(&some_vec)` call sites still compile via deref coercion.
///
/// # Panics
/// Panics on an empty stream, on a multi-token stream that contains no known
/// binary operator, or on a `Token::Number` whose text is not a valid `i64`.
fn parse_math(tokens: &[Token]) -> ValueNode {
    // Base case: a single token is a literal or a variable reference.
    if tokens.len() == 1 {
        return match &tokens[0] {
            Token::Number(n) => ValueNode::Literal(Literal::Int(
                n.parse::<i64>().expect("invalid integer literal"),
            )),
            Token::Identifier(identifier) => ValueNode::Variable(identifier.to_owned()),
            _ => todo!(),
        };
    }

    // Precedence levels, loosest-binding first. An ordered array (not a
    // HashMap keyed by level) keeps iteration order deterministic when more
    // levels (e.g. `*`, `/`) are added later.
    let additive: &[Token] = &[Token::Plus, Token::Minus];
    let precedence_levels = [additive];

    // Split at the rightmost operator of the first level that occurs at all;
    // the rightmost occurrence makes the recursion left-associative.
    let split = precedence_levels
        .iter()
        .find_map(|ops| tokens.iter().rposition(|t| ops.contains(t)))
        .expect("expression contains no binary operator");

    // Recurse on the two sides of the operator. Slicing avoids the per-token
    // clones the previous index-loop implementation performed.
    let lhs = Box::new(parse_math(&tokens[..split]));
    let rhs = Box::new(parse_math(&tokens[split + 1..]));

    match tokens[split] {
        Token::Plus => ValueNode::Operator(Operator::Add(lhs, rhs)),
        Token::Minus => ValueNode::Operator(Operator::Sub(lhs, rhs)),
        // `split` was found by matching against the operator set above.
        _ => unreachable!("split index always points at a recognized operator"),
    }
}
fn parse_basic(tokens: &Vec<Token>, i: &mut usize) -> Expression {
    let literal = match &tokens[*i] {
        Token::Identifier(identifier) => Expression::Variable(identifier.to_owned()),