Compare commits


2 commits

Author  SHA1        Message                                                    Date
vanten  97c940ba65  Merge branch 'main' of git.vanten-s.com:vanten-s/plonkus  2023-09-30 22:14:21 +02:00
vanten  9f828dde67  Deleted unnecesary code                                    2023-09-30 22:14:12 +02:00
4 changed files with 7 additions and 78 deletions

src/assembler.rs  (new file, +5)

@@ -0,0 +1,5 @@
+use std::collections::HashMap;
+
+fn parse_label(line: String, line_i: i64) {
+
+}

src/ast.rs

@@ -39,24 +39,11 @@ pub enum BinOp {
     Minus,
 }
 
-#[derive(Debug, Clone)]
-pub enum ValueNode {
-    Literal(Literal),
-    Operator(Operator),
-    Variable(String),
-}
-
 #[derive(Debug, Clone)]
 pub enum Literal {
     Int(i64),
 }
 
-#[derive(Debug, Clone)]
-pub enum Operator {
-    Add(Box<ValueNode>, Box<ValueNode>),
-    Sub(Box<ValueNode>, Box<ValueNode>),
-}
-
 #[derive(Debug, Clone)]
 pub struct AssemblerState {
     pub functions: HashMap<String, Function>,
@@ -467,7 +454,7 @@ C-OUT RAM-IN 0xFF ; Store address after jump in ram
 }
 
 impl Node for Unary {
-    fn to_assembly(&self, state: &mut AssemblerState) -> String {
+    fn to_assembly(&self, _state: &mut AssemblerState) -> String {
         todo!()
     }
 }

src/main.rs

@@ -1,5 +1,6 @@
 mod ast;
 mod parse;
+mod assembler;
 mod tokeniser;
 
 fn main() {

src/parse.rs

@@ -1,71 +1,7 @@
-use std::collections::{HashMap, HashSet};
 use crate::ast::*;
 use crate::tokeniser::Keyword;
 use crate::tokeniser::Token;
 
-fn parse_math(tokens: &Vec<Token>) -> ValueNode {
-    let mut prioritizing_order: HashMap<i64, HashSet<Token>> = HashMap::new();
-    prioritizing_order.insert(0, HashSet::from([Token::Plus, Token::Minus]));
-
-    let mut ordered_binops: Vec<Token> = Vec::new();
-    let mut ordered_binops_indexes: Vec<usize> = Vec::new();
-
-    if tokens.len() == 1 {
-        return match &tokens[0] {
-            Token::Number(n) => ValueNode::Literal(Literal::Int(n.parse::<i64>().unwrap())),
-            Token::Identifier(identifier) => ValueNode::Variable(identifier.to_owned()),
-            _ => todo!(),
-        };
-    }
-
-    for j in prioritizing_order.keys() {
-        let mut i = 0;
-        let current_order_binops = prioritizing_order.get(j).unwrap();
-        while i < tokens.len() {
-            for binop in current_order_binops {
-                if &tokens[i] == binop {
-                    ordered_binops.push(binop.to_owned());
-                    ordered_binops_indexes.push(i);
-                }
-            }
-            i += 1;
-        }
-    }
-
-    let binop = ordered_binops.pop().unwrap();
-    let binop_index = ordered_binops_indexes.pop().unwrap();
-
-    let mut a: Vec<Token> = Vec::new();
-    let mut b: Vec<Token> = Vec::new();
-
-    let mut i = 0;
-    while i < tokens.len() {
-        if i < binop_index {
-            a.push(tokens[i].clone());
-        }
-        if i > binop_index {
-            b.push(tokens[i].clone());
-        }
-        i += 1;
-    }
-
-    return match binop {
-        Token::Plus => ValueNode::Operator(Operator::Add(
-            Box::new(parse_math(&a)),
-            Box::new(parse_math(&b)),
-        )),
-        Token::Minus => ValueNode::Operator(Operator::Sub(
-            Box::new(parse_math(&a)),
-            Box::new(parse_math(&b)),
-        )),
-        _ => todo!(),
-    };
-}
-
 fn parse_basic(tokens: &Vec<Token>, i: &mut usize) -> Expression {
     let literal = match &tokens[*i] {
         Token::Identifier(identifier) => Expression::Variable(identifier.to_owned()),