Convert to internal tokens

main
Elnu 1 year ago
parent 5442a8fc87
commit d957816ded

@ -1,5 +1,8 @@
use renrs; use renrs;
fn main() { fn main() {
renrs::parse("demo.rpy"); let tokens = renrs::parse_file("demo.rpy");
for token in tokens {
println!("{:?}", token);
}
} }

@ -7,7 +7,35 @@ use pest_derive::Parser;
#[grammar = "rpy.pest"] #[grammar = "rpy.pest"]
struct RpyParser; struct RpyParser;
/// A single lexical token produced by the Ren'Py-script parser.
///
/// `Clone`, `PartialEq`, and `Eq` are derived so callers can compare and
/// duplicate tokens without hand-rolled impls (both payload types —
/// `String` and `Vec<Token>` — already support them).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Token {
    /// A bare keyword, stored as its source text.
    Keyword(String),
    /// A quoted string literal with its quote escapes resolved.
    String(String),
    /// A bracketed sequence of nested tokens.
    Array(Vec<Token>),
}
pub fn parse(script: &str) -> Vec<Vec<Token>> {
let file = RpyParser::parse(Rule::file, script)
.expect("unsuccessful parse")
.next().unwrap();
let mut lines = Vec::new();
for line in file.into_inner() {
let mut tokens = Vec::new();
match line.as_rule() {
Rule::line => {
for token in line.into_inner() {
tokens.push(parse_token(token));
}
},
Rule::EOI => (),
_ => unreachable!(),
}
lines.push(tokens);
}
lines
}
fn parse_token(pair: pest::iterators::Pair<Rule>) -> Token {
let token = pair.as_rule(); let token = pair.as_rule();
match token { match token {
Rule::token => {}, Rule::token => {},
@ -15,42 +43,28 @@ fn describe_token(pair: pest::iterators::Pair<Rule>) {
}; };
let contents = pair.into_inner().next().unwrap(); let contents = pair.into_inner().next().unwrap();
let contents_rule = contents.as_rule(); let contents_rule = contents.as_rule();
let str = match contents_rule { match contents_rule {
Rule::string => { Rule::string => {
let data = contents.into_inner().next().unwrap(); let data = contents.into_inner().next().unwrap();
match data.as_rule() { Token::String(match data.as_rule() {
Rule::single_quote_string_data => data.as_str().replace("\\'", "'"), Rule::single_quote_string_data => data.as_str().replace("\\'", "'"),
Rule::double_quote_string_data => data.as_str().replace("\\\"", "\""), Rule::double_quote_string_data => data.as_str().replace("\\\"", "\""),
_ => unreachable!(), _ => unreachable!(),
} })
}, },
Rule::array => { Rule::array => {
println!("array: Start array"); let mut array = Vec::new();
for token in contents.into_inner() { for token in contents.into_inner() {
describe_token(token); array.push(parse_token(token));
} }
"End array".to_string() Token::Array(array)
}
Rule::keyword => Token::Keyword(contents.as_str().to_owned()),
__ => unreachable!(),
} }
_ => contents.as_str().to_owned(),
};
println!("{:?}: {}", contents_rule, str);
} }
pub fn parse(file_path: &str) { pub fn parse_file(file_path: &str) -> Vec<Vec<Token>> {
let unparsed_file = fs::read_to_string(file_path).expect("cannot find file"); let unparsed_file = fs::read_to_string(file_path).expect("cannot find file");
let file = RpyParser::parse(Rule::file, &unparsed_file) parse(&unparsed_file)
.expect("unsuccessful parse") // unwrap the parse result
.next().unwrap(); // get and unwrap the `file` rule; never fails
for line in file.into_inner() {
match line.as_rule() {
Rule::line => {
for token in line.into_inner() {
describe_token(token);
}
println!()
},
Rule::EOI => (),
_ => unreachable!(),
}
}
} }

Loading…
Cancel
Save