diff --git a/src/main.rs b/src/main.rs
index b01cbec..87c1654 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,12 +1,17 @@
-mod assembly;
+mod ast;
+mod parse;
 mod tokeniser;
 
 fn main() {
-    let filename = std::env::args().nth(1).expect("No filename argument");
-    let contents =
-        std::fs::read_to_string(&filename).expect(&format!("No file named {}", &filename));
+    let filename = "test.plonkus";
 
-    let tokens = dbg!(tokeniser::tokenise(contents));
+    let contents = std::fs::read_to_string(filename).expect(&format!("No file named {}", filename));
 
-    let assembled = dbg!(assembly::assemblize(tokens));
+    let tokens = tokeniser::tokenise(contents);
+
+    let functions = parse::ast(tokens);
+
+    let assembly = ast::to_assembly(functions);
+
+    println!("{}", assembly);
 }
diff --git a/src/parse.rs b/src/parse.rs
new file mode 100644
index 0000000..d258998
--- /dev/null
+++ b/src/parse.rs
@@ -0,0 +1,199 @@
+use std::collections::{HashMap, HashSet};
+
+use crate::ast::*;
+use crate::tokeniser::Keyword;
+use crate::tokeniser::Token;
+
+#[derive(Clone, Debug)]
+enum ParseState {
+    Function(Function),
+    Statement(Function),
+    None(Vec<Function>),
+}
+
+impl Function {
+    fn push_statement(&mut self, statement: Statement) -> Function {
+        self.statements.push(statement);
+        self.to_owned()
+    }
+}
+
+fn parse_math(tokens: &Vec<Token>) -> ValueNode {
+    let mut prioritizing_order: HashMap<usize, HashSet<Token>> = HashMap::new();
+    prioritizing_order.insert(0, HashSet::from([Token::Plus, Token::Minus]));
+
+    let mut ordered_binops: Vec<Token> = Vec::new();
+    let mut ordered_binops_indexes: Vec<usize> = Vec::new();
+
+    if tokens.len() == 1 {
+        return match &tokens[0] {
+            Token::Number(n) => ValueNode::Literal(Literal::Int(n.parse().unwrap())),
+            _ => todo!(),
+        };
+    }
+
+    for j in prioritizing_order.keys() {
+        let mut i = 0;
+        let current_order_binops = prioritizing_order.get(j).unwrap();
+
+        while i < tokens.len() {
+            for binop in current_order_binops {
+                if &tokens[i] == binop {
+                    ordered_binops.push(binop.to_owned());
+                    ordered_binops_indexes.push(i);
+                }
+            }
+
+            i += 1;
+        }
+    }
+
+    let binop = ordered_binops.pop().unwrap();
+    let binop_index = ordered_binops_indexes.pop().unwrap();
+
+    let mut a: Vec<Token> = Vec::new();
+    let mut b: Vec<Token> = Vec::new();
+
+    let mut i = 0;
+    while i < tokens.len() {
+        if i < binop_index {
+            a.push(tokens[i].clone());
+        }
+        if i > binop_index {
+            b.push(tokens[i].clone());
+        }
+
+        i += 1;
+    }
+
+    return match binop {
+        Token::Plus => ValueNode::Operator(Operator::Add(
+            Box::new(parse_math(&a)),
+            Box::new(parse_math(&b)),
+        )),
+        Token::Minus => ValueNode::Operator(Operator::Sub(
+            Box::new(parse_math(&a)),
+            Box::new(parse_math(&b)),
+        )),
+        _ => todo!(),
+    };
+}
+
+fn parse_statement(
+    parse_state: &mut ParseState,
+    i: &mut usize,
+    tokens: &Vec<Token>,
+    functions: &Vec<Function>,
+) -> ParseState {
+    let mut current_function = match parse_state {
+        ParseState::Statement(f) => f.to_owned(),
+        _ => todo!(),
+    };
+
+    if tokens[i.to_owned()] == Token::CloseSquiglyBracket {
+        let mut functions = functions.to_owned();
+        functions.push(current_function);
+
+        return ParseState::None(functions);
+    }
+
+    let statement = match &tokens[i.to_owned()] {
+        Token::Keyword(Keyword::Let) => {
+            *i += 1;
+            let name = match tokens[*i].clone() {
+                Token::Identifier(identifier) => identifier,
+                _ => todo!(),
+            };
+
+            *i += 1;
+            match tokens[*i].clone() {
+                Token::Equals => {}
+                _ => todo!(),
+            };
+
+            let mut declaration_tokens: Vec<Token> = Vec::new();
+            while tokens[*i] != Token::Semi {
+                *i += 1;
+                declaration_tokens.push(tokens[*i].clone());
+            }
+            Statement::VariableDeclaration(name, parse_math(&declaration_tokens))
+        }
+        Token::Identifier(identifier) => {
+            *i += 1;
+
+            let mut declaration_tokens: Vec<Vec<Token>> = Vec::new();
+
+            *i += 1;
+            while tokens[*i] != Token::CloseParanthesis {
+                let mut argument_tokens: Vec<Token> = Vec::new();
+                while tokens[*i] != Token::Comma {
+                    argument_tokens.push(tokens[*i].clone());
+                    *i += 1;
+                }
+                declaration_tokens.push(argument_tokens);
+                *i += 1;
+            }
+
+            *i += 1;
+
+            let mut arguments: Vec<ValueNode> = Vec::new();
+
+            for token_vec in &declaration_tokens {
+                if token_vec == &Vec::new() {
+                    continue;
+                }
+                arguments.push(parse_math(token_vec));
+            }
+
+            Statement::FunctionCall(identifier.to_owned(), arguments)
+        }
+        _other => {
+            todo!()
+        }
+    };
+
+    current_function.push_statement(statement);
+
+    return ParseState::Statement(current_function);
+}
+
+pub fn ast(tokens: Vec<Token>) -> Vec<Function> {
+    let mut parse_state = ParseState::None(Vec::new());
+    let mut functions: Vec<Function> = Vec::new();
+
+    let mut i = 0;
+    while i < tokens.len() {
+        parse_state = match &parse_state {
+            ParseState::None(funcs) => {
+                functions = funcs.to_owned();
+                // Expect function and shit
+                match &tokens[i] {
+                    Token::Keyword(keyword) => match keyword {
+                        Keyword::Function => {
+                            ParseState::Function(Function::new("".to_string(), Vec::new()))
+                        }
+                        _ => todo!(),
+                    },
+                    _ => todo!(),
+                }
+            }
+            ParseState::Function(func) => match &tokens[i] {
+                Token::Identifier(identifier) => {
+                    ParseState::Function(Function::new(identifier.to_owned(), Vec::new()))
+                }
+                Token::OpenSquiglyBracket => ParseState::Statement(func.to_owned()),
+                _ => todo!(),
+            },
+            ParseState::Statement(_function) => {
+                parse_statement(&mut parse_state, &mut i, &tokens, &functions)
+            }
+        };
+
+        i += 1;
+    }
+
+    match parse_state {
+        ParseState::None(v) => v,
+        _ => todo!(),
+    }
+}
diff --git a/test.plonkus b/test.plonkus
index d232c62..ce0a0df 100644
--- a/test.plonkus
+++ b/test.plonkus
@@ -1 +1,4 @@
-return 42;
+fn main {
+    out(42 - 3 - 1,);
+    exit(,);
+}
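For context on the patch above: `parse_math` scans the token list for `+`/`-`, records every match, then splits the list at the last operator it found and recurses on the two halves. The sketch below is a standalone illustration of that splitting idea; it is not part of the patch, and `Tok`/`Expr` are simplified stand-ins for the crate's `Token` and `ValueNode` types. On the `42 - 3 - 1` expression from `test.plonkus`, splitting at the last `-` yields the left-associative tree `Sub(Sub(42, 3), 1)`, which evaluates to 38.

```rust
// Standalone sketch of the "split at the last +/- and recurse" approach used
// by parse_math in src/parse.rs. Tok and Expr are simplified stand-ins here,
// not the crate's Token / ValueNode types.

#[allow(dead_code)] // Tok::Plus is only used in patterns in this sketch
#[derive(Debug)]
enum Tok {
    Num(i64),
    Plus,
    Minus,
}

#[derive(Debug)]
enum Expr {
    Lit(i64),
    Add(Box<Expr>, Box<Expr>),
    Sub(Box<Expr>, Box<Expr>),
}

fn parse(tokens: &[Tok]) -> Expr {
    // Base case: a single number token becomes a literal.
    if tokens.len() == 1 {
        if let Tok::Num(n) = &tokens[0] {
            return Expr::Lit(*n);
        }
        unreachable!("expected a number token");
    }

    // Find the *last* +/- and split there, so the left operand keeps growing:
    // 42 - 3 - 1 becomes Sub(Sub(42, 3), 1), i.e. left-associative grouping.
    let split = tokens
        .iter()
        .rposition(|t| matches!(t, Tok::Plus | Tok::Minus))
        .expect("no operator in multi-token expression");

    let lhs = Box::new(parse(&tokens[..split]));
    let rhs = Box::new(parse(&tokens[split + 1..]));
    match tokens[split] {
        Tok::Plus => Expr::Add(lhs, rhs),
        Tok::Minus => Expr::Sub(lhs, rhs),
        _ => unreachable!(),
    }
}

fn main() {
    // The expression from test.plonkus: 42 - 3 - 1
    let tokens = [
        Tok::Num(42),
        Tok::Minus,
        Tok::Num(3),
        Tok::Minus,
        Tok::Num(1),
    ];
    // Prints: Sub(Sub(Lit(42), Lit(3)), Lit(1))
    println!("{:?}", parse(&tokens));
}
```

Splitting at the first operator instead would group the expression as `Sub(42, Sub(3, 1))`, i.e. right-associatively, which gives the wrong result for subtraction.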