@@ -7,14 +7,39 @@ use pest_derive::Parser;
 #[grammar = "rpy.pest"]
 struct RpyParser;
 
+// Raw script tokens
 #[derive(Debug)]
 pub enum Token {
     Keyword(String),
-    String(String),
+    Str(String),
     Array(Vec<Token>),
 }
 
-pub fn parse(script: &str) -> Vec<Vec<Token>> {
+impl Token {
+    fn print(&self) -> String {
+        match &self {
+            Keyword(keyword) => keyword.to_owned(),
+            Str(string) => "String".to_owned(),
+            Array(tokens) => describe_token_array(&tokens),
+        }
+    }
+}
+
+use Token::*;
+
+// Parsed script commands
+#[derive(Debug)]
+pub enum Command {
+    Say {
+        name: Option<String>,
+        text: String,
+    }
+}
+
+use Command::*;
+
+// Tokenize raw script string
+fn tokenize(script: &str) -> Vec<Vec<Token>> {
     let file = RpyParser::parse(Rule::file, script)
         .expect("unsuccessful parse")
         .next().unwrap();
@@ -24,7 +49,7 @@ pub fn parse(script: &str) -> Vec<Vec<Token>> {
         match line.as_rule() {
             Rule::line => {
                 for token in line.into_inner() {
-                    tokens.push(parse_token(token));
+                    tokens.push(parse_pair(token));
                 }
             },
             Rule::EOI => (),
@@ -38,7 +63,8 @@ pub fn parse(script: &str) -> Vec<Vec<Token>> {
     lines
 }
 
-fn parse_token(pair: pest::iterators::Pair<Rule>) -> Token {
+// Parse raw pest data into Token
+fn parse_pair(pair: pest::iterators::Pair<Rule>) -> Token {
     let token = pair.as_rule();
     match token {
         Rule::token => {},
@@ -49,7 +75,7 @@ fn parse_token(pair: pest::iterators::Pair<Rule>) -> Token {
     match contents_rule {
         Rule::string => {
             let data = contents.into_inner().next().unwrap();
-            Token::String(match data.as_rule() {
+            Token::Str(match data.as_rule() {
                 Rule::single_quote_string_data => data.as_str().replace("\\'", "'"),
                 Rule::double_quote_string_data => data.as_str().replace("\\\"", "\""),
                 _ => unreachable!(),
@@ -58,7 +84,7 @@ fn parse_token(pair: pest::iterators::Pair<Rule>) -> Token {
         Rule::array => {
             let mut array = Vec::new();
             for token in contents.into_inner() {
-                array.push(parse_token(token));
+                array.push(parse_pair(token));
             }
             Token::Array(array)
         }
@@ -67,7 +93,39 @@ fn parse_token(pair: pest::iterators::Pair<Rule>) -> Token {
     }
 }
 
-pub fn parse_file(file_path: &str) -> Vec<Vec<Token>> {
+// Tokenize file
+fn tokenize_file(file_path: &str) -> Vec<Vec<Token>> {
     let unparsed_file = fs::read_to_string(file_path).expect("cannot find file");
-    parse(&unparsed_file)
+    tokenize(&unparsed_file)
+}
+
+fn describe_token_array(line: &Vec<Token>) -> String {
+    let mut description = "[".to_owned();
+    let mut iter = line.iter();
+    description.push_str(&format!("{}", iter.next().unwrap().print()));
+    for token in iter {
+        description.push_str(&format!(", {}", token.print()));
+    }
+    description.push_str("]");
+    description
+}
+
+// Parse file into commands
+pub fn parse_file(file_path: &str) -> Vec<Command> {
+    let token_lines = tokenize_file(file_path);
+    let mut commands = Vec::new();
+    for line in token_lines {
+        commands.push(match line.as_slice() {
+            [Str(text)] => Say {
+                name: None,
+                text: text.to_owned()
+            },
+            [Str(name), Str(text)] => Say {
+                name: Some(name.to_owned()),
+                text: text.to_owned()
+            },
+            _ => panic!("Unknown command {}", describe_token_array(&line)),
+        });
+    }
+    commands
 }
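
A minimal usage sketch, not part of the diff above: driving the new `parse_file` entry point from `main`. The `parser` module path and the `script.rpy` file name are illustrative assumptions.

// Assumption: the diffed code lives in src/parser.rs, reachable as `parser`.
fn main() {
    for command in parser::parse_file("script.rpy") {
        match command {
            // One string token is narration; two are a named Say line.
            parser::Command::Say { name: Some(name), text } => println!("{}: {}", name, text),
            parser::Command::Say { name: None, text } => println!("{}", text),
        }
    }
}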
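A hedged test sketch for the renamed tokenizer; it assumes the rpy.pest grammar accepts a newline-terminated line of whitespace-separated quoted strings, which is inferred from the string rules above rather than confirmed by the diff.

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tokenizes_two_strings_on_one_line() {
        // Assumed input shape: two double-quoted strings on one line.
        let lines = tokenize("\"narrator\" \"Hello\"\n");
        assert_eq!(lines.len(), 1);
        match lines[0].as_slice() {
            [Token::Str(name), Token::Str(text)] => {
                assert_eq!(name, "narrator");
                assert_eq!(text, "Hello");
            }
            _ => panic!("unexpected tokens: {}", describe_token_array(&lines[0])),
        }
    }
}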