diff --git a/src/bong/lexer.rs b/src/bong/lexer.rs
index 818aed6..ea45e13 100644
--- a/src/bong/lexer.rs
+++ b/src/bong/lexer.rs
@@ -1,23 +1,35 @@
-enum LexerErrorType {
+#[derive(Debug, Clone, PartialEq)]
+pub enum LexerErrorType {
     UnexpectedToken(char),
     InvalidConstructor,
 }
 
-struct LexerError {
+#[derive(Debug, Clone, PartialEq)]
+pub struct LexerError {
     error: LexerErrorType,
     line: isize,
     col: isize,
 }
 
-enum Token {
+#[derive(Debug, Clone, PartialEq)]
+pub enum Token {
     Name(String),
+    Id(String), // not implemented
     Class(String),
     SlWhitespace(String),
     MlWhitespace(String),
     SlComment(String),
+    MlComment(String), // not implemented
+    Int(String),       // not implemented
+    Float(String),     // not implemented
     String(String),
+    Null(String),  // not implemented
+    True(String),  // not implemented
+    False(String), // not implemented
     LBrace(String),
     RBrace(String),
+    LBracket(String), // not implemented
+    RBracket(String), // not implemented
 }
 
 enum Mode {
@@ -44,7 +56,7 @@ impl Mode {
     }
 }
 
-fn lex(code: String) -> Result<Vec<Token>, LexerError> {
+pub fn lex(code: String) -> Result<Vec<Token>, LexerError> {
     let mut tokens = Vec::new();
     let mut value = Vec::new();
     let mut iter = code.chars();
@@ -276,3 +288,28 @@ fn lex(code: String) -> Result<Vec<Token>, LexerError> {
 
     Ok(tokens)
 }
+
+#[test]
+fn test_example_1() {
+    let text = "text.title {
+    // text { \"hello world\" }
+    \"hello world\"
+}";
+    let tokens = lex(text.to_string());
+    assert_eq!(
+        tokens,
+        Ok(vec![
+            Token::SlWhitespace("".to_string()),
+            Token::Name("text".to_string()),
+            Token::Class(".title".to_string()),
+            Token::SlWhitespace(" ".to_string()),
+            Token::LBrace("{".to_string()),
+            Token::MlWhitespace("\n    ".to_string()),
+            Token::SlComment("// text { \"hello world\" }".to_string()),
+            Token::MlWhitespace("\n    ".to_string()),
+            Token::String("\"hello world\"".to_string()),
+            Token::MlWhitespace("\n".to_string()),
+            Token::RBrace("}".to_string()),
+        ])
+    )
+}
diff --git a/src/bong/parser.rs b/src/bong/parser.rs
index 95b8dfe..ffe47d1 100644
--- a/src/bong/parser.rs
+++ b/src/bong/parser.rs
@@ -1,6 +1,6 @@
 use std::iter::Map;
 
-use crate::bong::lexer::Lexer;
+use crate::bong::lexer::Token;
 
 pub enum Node {
     Element {
@@ -18,12 +18,53 @@ pub enum Node {
     Bool(bool),
 }
 
-pub struct Parser<'a> {
-    lexer: Lexer<'a>,
-}
+// pub struct Parser {
+//     tokens: Vec<Token>,
+// }
 
-impl<'a> Parser<'a> {
-    pub fn new(lexer: Lexer<'a>) -> Self {
-        Self { lexer }
-    }
-}
+// impl Parser {
+//     pub fn new(lexer: Lexer) -> Self {
+//         Self { lexer }
+//     }
+// }
+
+// type ParserError = String;
+
+// impl Parser {
+//     pub fn new(tokens: Vec<Token>) -> Self {
+//         Self { tokens, index: 0 }
+//     }
+
+//     pub fn parse_top_level(&mut self) -> Result<Node, ParserError> {
+//         match self.peek() {
+//             Some(Token::Name(_)) => self.parse_element(),
+//             Some(_) => self.parse_value(),
+//             None => Err("expected value or element".to_owned()),
+//         }
+//     }
+
+//     fn parse_element(&mut self) -> Result<Node, ParserError> {}
+
+//     fn parse_value(&mut self) -> Result<Node, ParserError> {
+//         match self.peek() {
+//             Some(Token::LBrace(_)) => self.parse_object(),
+//             Some(Token::LBracket(_)) => self.parse_array(),
+//             Some(_) => Err("unexpected token, expected value".to_owned()),
+//             None => Err("expected value".to_owned()),
+//         }
+//     }
+
+//     fn parse_object(&mut self) -> Result<Node, ParserError> {}
+//     fn parse_array(&mut self) -> Result<Node, ParserError> {}
+//     fn parse_number(&mut self) -> Result<Node, ParserError> {}
+//     fn parse_string(&mut self) -> Result<Node, ParserError> {}
+//     fn parse_bool(&mut self) -> Result<Node, ParserError> {}
+//     fn parse_null(&mut self) -> Result<Node, ParserError> {}
+
+//     fn step(&mut self) {
+//         self.index += 1
+//     }
+//     fn peek(&self) -> Option<&Token> {
+//         self.tokens.get(self.index)
+//     }
+// }