From 10db5e959b9d97d190adcbb3d4c708ed553073dd Mon Sep 17 00:00:00 2001
From: ElnuDev
Date: Sat, 20 May 2023 19:37:16 -0700
Subject: [PATCH] Add command parsing

---
 demo/demo.rpy    | 23 ++-------------
 demo/src/main.rs |  6 ++--
 src/lib.rs       | 74 ++++++++++++++++++++++++++++++++++++++++++------
 3 files changed, 72 insertions(+), 31 deletions(-)

diff --git a/demo/demo.rpy b/demo/demo.rpy
index 6d2ff52..54b0d05 100644
--- a/demo/demo.rpy
+++ b/demo/demo.rpy
@@ -1,20 +1,3 @@
-show black amogus # this is a comment
-# this is a full line comment
-what the heck
-"this is a string with a # comment"
-"this is a string over
-multiple lines"
-"this is \"escaped\""
-'this is a single quote string'
-'this also has escaped \'quotes\''
-this is cool # comment
-any empty array []
-[
-    "this", # test
-    "is",
-    "an",
-    "array"
-]
-["this","is","an","array"]
-
-huh
\ No newline at end of file
+"Bob sat on the bench."
+"Bob" "Good morning!"
+eat "apple"
\ No newline at end of file
diff --git a/demo/src/main.rs b/demo/src/main.rs
index 8045c4e..08a09bf 100644
--- a/demo/src/main.rs
+++ b/demo/src/main.rs
@@ -1,8 +1,8 @@
 use renrs;
 
 fn main() {
-    let tokens = renrs::parse_file("demo.rpy");
-    for token in tokens {
-        println!("{:?}", token);
+    let commands = renrs::parse_file("demo.rpy");
+    for command in commands {
+        println!("{:?}", command);
     }
 }
diff --git a/src/lib.rs b/src/lib.rs
index 0e8fe86..a459740 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -7,14 +7,39 @@ use pest_derive::Parser;
 #[grammar = "rpy.pest"]
 struct RpyParser;
 
+// Raw script tokens
 #[derive(Debug)]
 pub enum Token {
     Keyword(String),
-    String(String),
+    Str(String),
     Array(Vec<Token>),
 }
 
-pub fn parse(script: &str) -> Vec<Vec<Token>> {
+impl Token {
+    fn print(&self) -> String {
+        match &self {
+            Keyword(keyword) => keyword.to_owned(),
+            Str(string) => "String".to_owned(),
+            Array(tokens) => describe_token_array(&tokens),
+        }
+    }
+}
+
+use Token::*;
+
+// Parsed script commands
+#[derive(Debug)]
+pub enum Command {
+    Say {
+        name: Option<String>,
+        text: String,
+    }
+}
+
+use Command::*;
+
+// Tokenize raw script string
+fn tokenize(script: &str) -> Vec<Vec<Token>> {
     let file = RpyParser::parse(Rule::file, script)
         .expect("unsuccessful parse")
         .next().unwrap();
@@ -24,7 +49,7 @@
         match line.as_rule() {
             Rule::line => {
                 for token in line.into_inner() {
-                    tokens.push(parse_token(token));
+                    tokens.push(parse_pair(token));
                 }
             },
             Rule::EOI => (),
@@ -38,7 +63,8 @@
     lines
 }
 
-fn parse_token(pair: pest::iterators::Pair<Rule>) -> Token {
+// Parse raw pest data into Token
+fn parse_pair(pair: pest::iterators::Pair<Rule>) -> Token {
     let token = pair.as_rule();
     match token {
         Rule::token => {},
@@ -49,7 +75,7 @@
     match contents_rule {
         Rule::string => {
             let data = contents.into_inner().next().unwrap();
-            Token::String(match data.as_rule() {
+            Token::Str(match data.as_rule() {
                 Rule::single_quote_string_data => data.as_str().replace("\\'", "'"),
                 Rule::double_quote_string_data => data.as_str().replace("\\\"", "\""),
                 _ => unreachable!(),
@@ -58,7 +84,7 @@
         Rule::array => {
             let mut array = Vec::new();
             for token in contents.into_inner() {
-                array.push(parse_token(token));
+                array.push(parse_pair(token));
             }
             Token::Array(array)
         }
@@ -67,7 +93,39 @@
     }
 }
 
-pub fn parse_file(file_path: &str) -> Vec<Vec<Token>> {
+// Tokenize file
+fn tokenize_file(file_path: &str) -> Vec<Vec<Token>> {
     let unparsed_file = fs::read_to_string(file_path).expect("cannot find file");
-    parse(&unparsed_file)
+    tokenize(&unparsed_file)
+}
+
+fn describe_token_array(line: &Vec<Token>) -> String {
+    let mut description = "[".to_owned();
+    let mut iter = line.iter();
+    description.push_str(&format!("{}", iter.next().unwrap().print()));
+    for token in iter {
+        description.push_str(&format!(", {}", token.print()));
+    }
+    description.push_str("]");
+    description
+}
+
+// Parse file into commands
+pub fn parse_file(file_path: &str) -> Vec<Command> {
+    let token_lines = tokenize_file(file_path);
+    let mut commands = Vec::new();
+    for line in token_lines {
+        commands.push(match line.as_slice() {
+            [Str(text)] => Say {
+                name: None,
+                text: text.to_owned()
+            },
+            [Str(name), Str(text)] => Say {
+                name: Some(name.to_owned()),
+                text: text.to_owned()
+            },
+            _ => panic!("Unknown command {}", describe_token_array(&line)),
+        });
+    }
+    commands
 }