diff --git a/bong_grammar.txt b/bong_grammar.txt
index 04becae..002ab9d 100644
--- a/bong_grammar.txt
+++ b/bong_grammar.txt
@@ -56,7 +56,8 @@ value ->
 object ->
     "{" object_properties "}"
 
-object_properties -> (_ object_property (_ "," _ object_property):* _ ",":?):? _
+object_properties ->
+    (_ object_property (_ "," _ object_property):* _ ",":?):? _
 
 object_property -> (Name | String) _ ("=" | ":") _ value
 
diff --git a/src/bong/lexer.rs b/src/bong/lexer.rs
index b9ebcad..ed12127 100644
--- a/src/bong/lexer.rs
+++ b/src/bong/lexer.rs
@@ -1,17 +1,17 @@
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq)]
 pub enum ErrorType {
     UnexpectedToken(char),
     InvalidConstructor,
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq)]
 pub struct Error {
     error: ErrorType,
     line: isize,
     col: isize,
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq)]
 pub enum Token {
     Name(String),
     Id(String),
@@ -30,10 +30,10 @@ pub enum Token {
     RBrace(String),
     LBracket(String),
     RBracket(String),
-    Equal(String),
-    Colon(String),
-    SemiColon(String),
-    Comma(String),
+    Equal(String), // not implemented
+    Colon(String), // not implemented
+    SemiColon(String), // not implemented
+    Comma(String), // not implemented
 }
 
 #[derive(PartialEq)]
diff --git a/src/bong/parser.rs b/src/bong/parser.rs
index 8f0d5dc..37bfd36 100644
--- a/src/bong/parser.rs
+++ b/src/bong/parser.rs
@@ -41,9 +41,20 @@ impl Parser {
     }
 
     fn parse_element(&mut self) -> Result<Node, String> {
+        let name = match self.current() {
+            Some(Token::Name(value)) => value.clone(),
+            _ => panic!("checked by previous predicate"),
+        };
+        self.step();
         todo!()
     }
 
+    fn parse_singe_line_field(&mut self) -> Result<Node, String> {
+        match self.current() {
+            _ => todo!(),
+        }
+    }
+
     fn parse_value(&mut self) -> Result<Node, String> {
         match self.current() {
             Some(Token::LBrace(_)) => self.parse_object(),
@@ -72,26 +83,28 @@ impl Parser {
                 Ok(Node::Object(values))
             }
             Some(t @ (Token::Name(_) | Token::String(_))) => {
-                // let key = match t {
-                //     Token::Name(v) => v,
-                //     Token::String(v) => &v[1..v.len() - 1].to_string(),
-                //     _ => panic!("checked by previous predicate"),
-                // };
-                // self.step();
-                // match self.current() {
-                //     Some(Token::Equal(_) | Token::Colon(_)) => {}
-                //     _ => return Err("expected ':' or '='".to_string()),
-                // }
-                // self.step();
-                // values[key] = Box::new(self.parse_value()?);
-                // self.parse_object_tail(values)
-                todo!()
+                let key = match t {
+                    Token::Name(v) => v.clone(),
+                    Token::String(v) => v[1..v.len() - 1].to_string(),
+                    _ => panic!("checked by previous predicate"),
+                };
+                self.step();
+                match self.current() {
+                    Some(Token::Equal(_) | Token::Colon(_)) => {}
+                    _ => return Err("expected ':' or '='".to_string()),
+                }
+                self.step();
+                values.insert(key, Box::new(self.parse_value()?));
+                self.parse_object_tail(values)
             }
             _ => Err("expected Name, String or '}'".to_string()),
         }
     }
 
-    fn parse_object_tail(&mut self, values: HashMap<String, Box<Node>>) -> Result<Node, String> {
+    fn parse_object_tail(
+        &mut self,
+        mut values: HashMap<String, Box<Node>>,
+    ) -> Result<Node, String> {
         loop {
             match self.current() {
                 Some(Token::RBrace(_)) => {
@@ -106,18 +119,18 @@ impl Parser {
                     break Ok(Node::Object(values));
                 }
                 Some(t @ (Token::Name(_) | Token::String(_))) => {
-                    // let key = match t {
-                    //     Token::Name(v) => v,
-                    //     Token::String(v) => &v[1..v.len() - 1].to_string(),
-                    //     _ => panic!("unterminated object, checked by previous predicate"),
-                    // };
-                    // self.step();
-                    // match self.current() {
-                    //     Some(Token::Equal(_) | Token::Colon(_)) => {}
-                    //     _ => return Err("expected ':' or '='".to_string()),
-                    //     }
-                    // self.step();
-                    // values[key] = Box::new(self.parse_value()?);
+                    let key = match t {
+                        Token::Name(v) => v.clone(),
+                        Token::String(v) => v[1..v.len() - 1].to_string(),
+                        _ => panic!("unterminated object, checked by previous predicate"),
+                    };
+                    self.step();
+                    match self.current() {
+                        Some(Token::Equal(_) | Token::Colon(_)) => {}
+                        _ => return Err("expected ':' or '='".to_string()),
+                    }
+                    self.step();
+                    values.insert(key, Box::new(self.parse_value()?));
                     todo!()
                 }
                 _ => {