Added a few tokens, a test, and some parsing

This commit is contained in:
SimonFJ20 2023-01-20 01:17:06 +01:00
parent 67ed64e3b4
commit 8b5d7cb0eb
2 changed files with 91 additions and 13 deletions

View File

@@ -1,23 +1,35 @@
enum LexerErrorType {
/// Classifies a lexing failure; carried inside `LexerError` together with
/// the source position where it happened.
#[derive(Debug, Clone, PartialEq)]
pub enum LexerErrorType {
/// An input character no lexer rule accepts (carries the offending char).
UnexpectedToken(char),
// NOTE(review): meaning not visible in this chunk — presumably a malformed
// token constructor; confirm against the body of `lex`.
InvalidConstructor,
}
struct LexerError {
/// A lexer failure paired with the source position where it occurred.
#[derive(Debug, Clone, PartialEq)]
pub struct LexerError {
/// The category of failure.
error: LexerErrorType,
/// Line of the offending character — NOTE(review): base (0- or 1-indexed)
/// and the reason for signed `isize` are not visible in this chunk; confirm
/// against `lex`, else `usize` would fit better.
line: isize,
/// Column of the offending character (same caveats as `line`).
col: isize,
}
enum Token {
/// A single lexical token produced by `lex`.
///
/// Each variant stores the raw source text it matched — whitespace and
/// comments included — which suggests the token stream is intended to
/// round-trip back to the original input; TODO confirm that intent.
#[derive(Debug, Clone, PartialEq)]
pub enum Token {
/// Bare identifier, e.g. `text`.
Name(String),
Id(String), // not implemented
/// Class selector including its leading `.`, e.g. `.title` (see test below).
Class(String),
/// Whitespace containing no newline ("single-line").
SlWhitespace(String),
/// Whitespace spanning at least one newline ("multi-line").
MlWhitespace(String),
/// A `//`-style comment, text included.
SlComment(String),
MlComment(String), // not implemented
Int(String), // not implemented
Float(String), // not implemented
/// String literal including its surrounding quotes.
String(String),
Null(String), // not implemented
True(String), // not implemented
False(String), // not implemented
LBrace(String),
RBrace(String),
LBracket(String), // not implemented
RBracket(String), // not implemented
}
enum Mode {
@@ -44,7 +56,7 @@ impl Mode {
}
}
fn lex(code: String) -> Result<Vec<Token>, LexerError> {
pub fn lex(code: String) -> Result<Vec<Token>, LexerError> {
let mut tokens = Vec::new();
let mut value = Vec::new();
let mut iter = code.chars();
@@ -276,3 +288,28 @@ fn lex(code: String) -> Result<Vec<Token>, LexerError> {
Ok(tokens)
}
// End-to-end lexer check: lexes a small snippet and pins the exact token
// stream, including whitespace and comment tokens.
//
// NOTE(review): this rendering may have collapsed runs of spaces both in the
// input literal and in the expected `*Whitespace` tokens — the two must agree
// byte-for-byte; verify the indentation against the original file.
#[test]
fn test_example_1() {
let text = "text.title {
// text { \"hello world\" }
\"hello world\"
}";
let tokens = lex(text.to_string());
assert_eq!(
tokens,
Ok(vec![
// `lex` emits an empty leading whitespace token before the first name.
Token::SlWhitespace("".to_string()),
Token::Name("text".to_string()),
// The class token keeps its leading `.` sigil.
Token::Class(".title".to_string()),
Token::SlWhitespace(" ".to_string()),
Token::LBrace("{".to_string()),
Token::MlWhitespace("\n ".to_string()),
// A commented-out line is preserved as a comment token, not dropped.
Token::SlComment("// text { \"hello world\" }".to_string()),
Token::MlWhitespace("\n ".to_string()),
Token::String("\"hello world\"".to_string()),
Token::MlWhitespace("\n".to_string()),
Token::RBrace("}".to_string()),
])
)
}

View File

@@ -1,6 +1,6 @@
use std::iter::Map;
use crate::bong::lexer::Lexer;
use crate::bong::lexer::Token;
pub enum Node {
Element {
@@ -18,12 +18,53 @@ pub enum Node {
Bool(bool),
}
/// Parser that pulls tokens on demand from a `Lexer`.
/// NOTE(review): an alternative eager design (`tokens: Vec<Token>` + index)
/// is sketched in the commented-out code below — one of the two should win.
pub struct Parser<'a> {
/// Token source; the lifetime presumably borrows the source text being
/// lexed — confirm against the `Lexer` definition.
lexer: Lexer<'a>,
}
// pub struct Parser {
// tokens: Vec<Token>,
// }
impl<'a> Parser<'a> {
pub fn new(lexer: Lexer<'a>) -> Self {
Self { lexer }
}
}
// impl Parser {
// pub fn new(lexer: Lexer) -> Self {
// Self { lexer }
// }
// }
// type ParserError = String;
// impl Parser {
// pub fn new(tokens: Vec<Token>) -> Self {
// Self { tokens, index: 0 }
// }
// pub fn parse_top_level(&mut self) -> Result<Node, ParserError> {
// match self.peek() {
// Some(Token::Name(_)) => self.parse_element(),
// Some(_) => self.parse_value(),
// None => Err("expected value or element".to_owned()),
// }
// }
// fn parse_element(&mut self) -> Result<Node, ParserError> {}
// fn parse_value(&mut self) -> Result<Node, ParserError> {
// match self.peek() {
// Some(Token::LBrace(_)) => self.parse_object(),
// Some(Token::LBracket(_)) => self.parse_object(),
// Some(_) => Err("unexpected token, expected value".to_owned()),
// None => Err("expected value".to_owned()),
// }
// }
// fn parse_object(&mut self) -> Result<Node, ParserError> {}
// fn parse_array(&mut self) -> Result<Node, ParserError> {}
// fn parse_number(&mut self) -> Result<Node, ParserError> {}
// fn parse_string(&mut self) -> Result<Node, ParserError> {}
// fn parse_bool(&mut self) -> Result<Node, ParserError> {}
// fn parse_null(&mut self) -> Result<Node, ParserError> {}
// fn step(&mut self) {
// self.index += 1
// }
// fn peek(&self) -> Option<&Token> {
// self.tokens.get(self.index)
// }
// }