use std::fs;

use pest::Parser;
use pest_derive::Parser;

#[derive(Parser)]
#[grammar = "rpy.pest"]
struct RpyParser;

// Raw script tokens
#[derive(Debug)]
pub enum Token {
    Keyword(String),
    Str(String),
    Array(Vec<Token>),
}

impl Token {
    fn print(&self) -> String {
        match self {
            Keyword(keyword) => keyword.to_owned(),
            Str(_) => "String".to_owned(),
            Array(tokens) => describe_token_array(tokens),
        }
    }
}

use Token::*;

// Parsed script commands
#[derive(Debug)]
pub enum Command {
    Say {
        name: Option<String>,
        text: String,
    },
}

use Command::*;

// Tokenize raw script string
fn tokenize(script: &str) -> Vec<Vec<Token>> {
    let file = RpyParser::parse(Rule::file, script)
        .expect("unsuccessful parse")
        .next()
        .unwrap();
    let mut lines = Vec::new();
    for line in file.into_inner() {
        let mut tokens = Vec::new();
        match line.as_rule() {
            Rule::line => {
                for token in line.into_inner() {
                    tokens.push(parse_pair(token));
                }
            }
            Rule::EOI => (),
            _ => unreachable!(),
        }
        // TODO: For some reason a blank final line is always parsed
        if !tokens.is_empty() {
            lines.push(tokens);
        }
    }
    lines
}

// Parse raw pest data into Token
fn parse_pair(pair: pest::iterators::Pair<Rule>) -> Token {
    match pair.as_rule() {
        Rule::token => {}
        _ => panic!("Not a token!"),
    };
    let contents = pair.into_inner().next().unwrap();
    match contents.as_rule() {
        Rule::string => {
            let data = contents.into_inner().next().unwrap();
            Token::Str(match data.as_rule() {
                // Unescape whichever quote character delimited the string
                Rule::single_quote_string_data => data.as_str().replace("\\'", "'"),
                Rule::double_quote_string_data => data.as_str().replace("\\\"", "\""),
                _ => unreachable!(),
            })
        }
        Rule::array => {
            let mut array = Vec::new();
            for token in contents.into_inner() {
                array.push(parse_pair(token));
            }
            Token::Array(array)
        }
        Rule::keyword => Token::Keyword(contents.as_str().to_owned()),
        _ => unreachable!(),
    }
}

// Tokenize file
fn tokenize_file(file_path: &str) -> Vec<Vec<Token>> {
    let unparsed_file = fs::read_to_string(file_path).expect("cannot find file");
    tokenize(&unparsed_file)
}

// Render a token line as e.g. [Keyword, String] for error messages
fn describe_token_array(line: &[Token]) -> String {
    let mut description = "[".to_owned();
    let mut iter = line.iter();
    description.push_str(&iter.next().unwrap().print());
    for token in iter {
        description.push_str(&format!(", {}", token.print()));
    }
    description.push(']');
    description
}

// Parse file into commands
pub fn parse_file(file_path: &str) -> Vec<Command> {
    let token_lines = tokenize_file(file_path);
    let mut commands = Vec::new();
    for line in token_lines {
        commands.push(match line.as_slice() {
            [Str(text)] => Say {
                name: None,
                text: text.to_owned(),
            },
            [Str(name), Str(text)] => Say {
                name: Some(name.to_owned()),
                text: text.to_owned(),
            },
            _ => panic!("Unknown command {}", describe_token_array(&line)),
        });
    }
    commands
}
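
// ---------------------------------------------------------------------------
// The grammar itself lives in rpy.pest, which is not shown here. Below is a
// sketch of one grammar shape consistent with the rule names this parser
// matches on (file, line, token, string, array, keyword, and the two string
// data rules). It is a reconstruction for illustration; the real rpy.pest
// may well differ:
//
//     WHITESPACE = _{ " " }
//     file    = { SOI ~ (line ~ NEWLINE?)* ~ EOI }
//     line    = { token+ }
//     token   = { string | array | keyword }
//     string  = ${ "'" ~ single_quote_string_data ~ "'"
//               | "\"" ~ double_quote_string_data ~ "\"" }
//     single_quote_string_data = @{ ("\\'" | !"'" ~ ANY)* }
//     double_quote_string_data = @{ ("\\\"" | !"\"" ~ ANY)* }
//     array   = { "[" ~ token ~ ("," ~ token)* ~ "]" }
//     keyword = @{ (ASCII_ALPHANUMERIC | "_")+ }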
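
// A minimal usage sketch, added for illustration. It assumes the grammar
// accepts a lone double-quoted string as a one-token line (as in the sketch
// above); adjust the input if rpy.pest differs.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tokenizes_a_dialogue_line() {
        // One script line holding a single double-quoted string token.
        let lines = tokenize("\"Hello, world!\"\n");
        assert_eq!(lines.len(), 1);
        match lines[0].as_slice() {
            [Str(text)] => assert_eq!(text.as_str(), "Hello, world!"),
            other => panic!("unexpected tokens: {:?}", other),
        }
    }
}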