From a9724a4636b417c5a2ea75318b6744cd340abb68 Mon Sep 17 00:00:00 2001
From: ElnuDev
Date: Mon, 22 May 2023 21:20:25 -0700
Subject: [PATCH] Implement booleans

---
 demo/demo.rpy      |  2 +-
 renrs/src/lib.rs   | 37 ++++++++++++++++++++++---------------
 renrs/src/rpy.pest |  5 ++++-
 3 files changed, 27 insertions(+), 17 deletions(-)

diff --git a/demo/demo.rpy b/demo/demo.rpy
index dbe8e1d..58d5e20 100644
--- a/demo/demo.rpy
+++ b/demo/demo.rpy
@@ -1,3 +1,3 @@
 "Bob sat on the bench."
 "Bob" "Good morning!"
-eat "potato"
\ No newline at end of file
+eat "potato" True
\ No newline at end of file
diff --git a/renrs/src/lib.rs b/renrs/src/lib.rs
index d980324..c347d82 100644
--- a/renrs/src/lib.rs
+++ b/renrs/src/lib.rs
@@ -13,6 +13,7 @@ pub enum Token {
     Keyword(String),
     Str(String),
     Array(Vec<Token>),
+    Boolean(bool),
 }
 
 impl Token {
@@ -21,6 +22,7 @@ impl Token {
             Keyword(keyword) => keyword,
             Str(_) => "String",
             Array(_) => "Array",
+            Boolean(_) => "Boolean",
         }
     }
 }
@@ -30,13 +32,8 @@ use Token::*;
 // Parsed script commands
 #[derive(Debug)]
 pub enum Command {
-    Say {
-        name: Option<String>,
-        text: String,
-    },
-    Eat {
-        food: String,
-    },
+    Say { name: Option<String>, text: String },
+    Eat { food: String, politely: bool },
 }
 
 use Command::*;
@@ -45,7 +42,8 @@ use Command::*;
 fn tokenize(script: &str) -> Vec<Vec<Token>> {
     let file = RpyParser::parse(Rule::file, script)
         .expect("unsuccessful parse")
-        .next().unwrap();
+        .next()
+        .unwrap();
     // TODO: Init with capacity
     let mut lines = Vec::new();
     for line in file.into_inner() {
@@ -55,7 +53,7 @@ fn tokenize(script: &str) -> Vec<Vec<Token>> {
                 for token in line.into_inner() {
                     tokens.push(parse_pair(token));
                 }
-            },
+            }
             Rule::EOI => (),
             _ => unreachable!(),
         }
@@ -63,7 +61,7 @@ fn tokenize(script: &str) -> Vec<Vec<Token>> {
         if tokens.len() > 0 {
            lines.push(tokens);
         }
-    } 
+    }
     lines
 }
 
@@ -71,7 +69,7 @@ fn tokenize(script: &str) -> Vec<Vec<Token>> {
 fn parse_pair(pair: pest::iterators::Pair<Rule>) -> Token {
     let token = pair.as_rule();
     match token {
-        Rule::token => {},
+        Rule::token => {}
         _ => panic!("Not a token!"),
     };
     let contents = pair.into_inner().next().unwrap();
@@ -84,7 +82,7 @@ fn parse_pair(pair: pest::iterators::Pair<Rule>) -> Token {
                 Rule::double_quote_string_data => data.as_str().replace("\\\"", "\""),
                 _ => unreachable!(),
             })
-        },
+        }
         Rule::array => {
             let mut array = Vec::new();
             for token in contents.into_inner() {
@@ -92,6 +90,11 @@ fn parse_pair(pair: pest::iterators::Pair<Rule>) -> Token {
             }
             Token::Array(array)
         }
+        Rule::boolean => Token::Boolean(match contents.as_str() {
+            "True" => true,
+            "False" => false,
+            _ => unreachable!(),
+        }),
         Rule::keyword => Token::Keyword(contents.as_str().to_owned()),
         __ => unreachable!(),
     }
@@ -109,7 +112,7 @@ fn describe_line(line: &Vec<Token>) -> String {
     description.push_str(&format!("{}", iter.next().unwrap().print()));
     for token in iter {
         description.push_str(&format!(", {}", token.print()));
-    } 
+    }
     description.push_str("]");
     description
 }
@@ -128,8 +131,12 @@ fn parse_file(file_path: PathBuf) -> Vec<Command> {
                 name: Some(name.to_owned()),
                 text: text.to_owned(),
             },
-            [Keyword(keyword), Str(food)] if keyword.eq("eat") => Eat {
+            [Keyword(keyword), Str(food), tail @ ..] if keyword.eq("eat") => Eat {
                 food: food.to_owned(),
+                politely: match tail {
+                    [Boolean(politely)] => *politely,
+                    _ => false,
+                },
             },
             _ => panic!("Unknown command {}", describe_line(&line)),
         });
@@ -159,4 +166,4 @@ impl State {
             Some(self.command_queue.remove(self.command_queue.len() - 1))
         }
     }
-}
\ No newline at end of file
+}
diff --git a/renrs/src/rpy.pest b/renrs/src/rpy.pest
index 5600fa6..a49b9e2 100644
--- a/renrs/src/rpy.pest
+++ b/renrs/src/rpy.pest
@@ -8,7 +8,7 @@ char = { !NEWLINE ~ ANY }
 // http://pest.rs/book/grammars/syntax.html#atomic
 inner = @{ char* }
 
-token = { string | array | keyword }
+token = { string | array | boolean | keyword }
 
 // KEYWORDS
 // has to be atomic for no implicit separate (spaces)
@@ -34,6 +34,9 @@ array = {
     | "[" ~ NEWLINE* ~ token ~ ("," ~ NEWLINE* ~ token)* ~ NEWLINE* ~ "]"
 }
 
+// BOOLEAN
+boolean = { "True" | "False" }
+
 // comments are a # followed by
 // any number of non-newline characters
 COMMENT = _{ "#" ~ char* }