@@ -13,6 +13,7 @@ pub enum Token {
     Keyword(String),
     Str(String),
     Array(Vec<Token>),
+    Boolean(bool),
 }
 
 impl Token {
@@ -21,6 +22,7 @@ impl Token {
             Keyword(keyword) => keyword,
             Str(_) => "String",
             Array(_) => "Array",
+            Boolean(_) => "Boolean",
         }
     }
 }
@@ -30,13 +32,8 @@ use Token::*;
 // Parsed script commands
 #[derive(Debug)]
 pub enum Command {
-    Say {
-        name: Option<String>,
-        text: String,
-    },
-    Eat {
-        food: String,
-    },
+    Say { name: Option<String>, text: String },
+    Eat { food: String, politely: bool },
 }
 
 use Command::*;
@@ -45,7 +42,8 @@ use Command::*;
 fn tokenize(script: &str) -> Vec<Vec<Token>> {
     let file = RpyParser::parse(Rule::file, script)
         .expect("unsuccessful parse")
-        .next().unwrap();
+        .next()
+        .unwrap();
     // TODO: Init with capacity
     let mut lines = Vec::new();
     for line in file.into_inner() {
@@ -55,7 +53,7 @@ fn tokenize(script: &str) -> Vec<Vec<Token>> {
                 for token in line.into_inner() {
                     tokens.push(parse_pair(token));
                 }
-            },
+            }
             Rule::EOI => (),
             _ => unreachable!(),
         }
@@ -63,7 +61,7 @@ fn tokenize(script: &str) -> Vec<Vec<Token>> {
         if tokens.len() > 0 {
             lines.push(tokens);
         }
     }
     lines
 }
 
@@ -71,7 +69,7 @@ fn tokenize(script: &str) -> Vec<Vec<Token>> {
 fn parse_pair(pair: pest::iterators::Pair<Rule>) -> Token {
     let token = pair.as_rule();
     match token {
-        Rule::token => {},
+        Rule::token => {}
         _ => panic!("Not a token!"),
     };
     let contents = pair.into_inner().next().unwrap();
@@ -84,7 +82,7 @@ fn parse_pair(pair: pest::iterators::Pair<Rule>) -> Token {
                 Rule::double_quote_string_data => data.as_str().replace("\\\"", "\""),
                 _ => unreachable!(),
             })
-        },
+        }
         Rule::array => {
             let mut array = Vec::new();
             for token in contents.into_inner() {
@@ -92,6 +90,11 @@ fn parse_pair(pair: pest::iterators::Pair<Rule>) -> Token {
             }
             Token::Array(array)
         }
+        Rule::boolean => Token::Boolean(match contents.as_str() {
+            "True" => true,
+            "False" => false,
+            _ => unreachable!(),
+        }),
         Rule::keyword => Token::Keyword(contents.as_str().to_owned()),
         __ => unreachable!(),
     }
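The new Rule::boolean arm relies on the pest grammar gaining a matching boolean rule that can only ever produce the two literals matched above, which is why the catch-all arm is unreachable!(). A minimal standalone sketch of just that mapping (the boolean_from_literal helper is illustrative, not part of the diff):

fn boolean_from_literal(text: &str) -> bool {
    match text {
        // Ren'Py/Python-style capitalised literals, as matched in parse_pair
        "True" => true,
        "False" => false,
        // the grammar is assumed never to produce anything else
        other => unreachable!("unexpected boolean literal {:?}", other),
    }
}

fn main() {
    assert!(boolean_from_literal("True"));
    assert!(!boolean_from_literal("False"));
}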
@@ -109,7 +112,7 @@ fn describe_line(line: &Vec<Token>) -> String {
     description.push_str(&format!("{}", iter.next().unwrap().print()));
     for token in iter {
         description.push_str(&format!(", {}", token.print()));
     }
     description.push_str("]");
     description
 }
@@ -128,8 +131,12 @@ fn parse_file(file_path: PathBuf) -> Vec<Command> {
             name: Some(name.to_owned()),
             text: text.to_owned(),
         },
-        [Keyword(keyword), Str(food)] if keyword.eq("eat") => Eat {
+        [Keyword(keyword), Str(food), tail @ ..] if keyword.eq("eat") => Eat {
             food: food.to_owned(),
+            politely: match tail {
+                [Boolean(politely)] => *politely,
+                _ => false,
+            },
         },
         _ => panic!("Unknown command {}", describe_line(&line)),
     });
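The tail @ .. rest pattern is what makes the trailing boolean optional: tail binds whatever tokens follow the food argument, so politely falls back to false when a script line omits it. A self-contained sketch of just that behaviour against hand-built token lines (the politely_from_line helper and the trimmed-down Token enum here are illustrative, not part of the engine):

enum Token {
    Keyword(String),
    Str(String),
    Boolean(bool),
}
use Token::*;

// Mirrors the `eat` arm in parse_file: a line is a slice of tokens,
// and the rest pattern captures the optional trailing Boolean.
fn politely_from_line(line: &[Token]) -> Option<bool> {
    match line {
        [Keyword(keyword), Str(_food), tail @ ..] if keyword.eq("eat") => Some(match tail {
            [Boolean(politely)] => *politely,
            _ => false, // no flag given, default to false
        }),
        _ => None, // not an `eat` line
    }
}

fn main() {
    let with_flag = [
        Keyword("eat".to_owned()),
        Str("cake".to_owned()),
        Boolean(true),
    ];
    let without_flag = [Keyword("eat".to_owned()), Str("cake".to_owned())];
    assert_eq!(politely_from_line(&with_flag), Some(true));
    assert_eq!(politely_from_line(&without_flag), Some(false));
}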
@@ -159,4 +166,4 @@ impl State {
             Some(self.command_queue.remove(self.command_queue.len() - 1))
         }
     }
 }