generated from ElnuDev/rust-project
Convert to internal tokens
parent 5442a8fc87
commit d957816ded
2 changed files with 46 additions and 29 deletions
src/main.rs
@@ -1,5 +1,8 @@
 use renrs;
 
 fn main() {
-    renrs::parse("demo.rpy");
+    let tokens = renrs::parse_file("demo.rpy");
+    for token in tokens {
+        println!("{:?}", token);
+    }
 }
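
The caller now gets structured data back instead of relying on the library to print. A minimal consumption sketch (not part of this commit) that branches on the Token variants defined in src/lib.rs below:

    // Sketch: walk the token lines returned by parse_file and
    // handle each Token variant explicitly.
    use renrs::Token;

    fn main() {
        let lines = renrs::parse_file("demo.rpy");
        for line in lines {
            for token in line {
                match token {
                    Token::Keyword(word) => print!("keyword {} ", word),
                    Token::String(text) => print!("string {:?} ", text),
                    Token::Array(items) => print!("array of {} items ", items.len()),
                }
            }
            println!();
        }
    }
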
src/lib.rs
@@ -7,7 +7,35 @@ use pest_derive::Parser;
 #[grammar = "rpy.pest"]
 struct RpyParser;
 
-fn describe_token(pair: pest::iterators::Pair<Rule>) {
+#[derive(Debug)]
+pub enum Token {
+    Keyword(String),
+    String(String),
+    Array(Vec<Token>),
+}
+
+pub fn parse(script: &str) -> Vec<Vec<Token>> {
+    let file = RpyParser::parse(Rule::file, script)
+        .expect("unsuccessful parse")
+        .next().unwrap();
+    let mut lines = Vec::new();
+    for line in file.into_inner() {
+        let mut tokens = Vec::new();
+        match line.as_rule() {
+            Rule::line => {
+                for token in line.into_inner() {
+                    tokens.push(parse_token(token));
+                }
+            },
+            Rule::EOI => (),
+            _ => unreachable!(),
+        }
+        lines.push(tokens);
+    }
+    lines
+}
+
+fn parse_token(pair: pest::iterators::Pair<Rule>) -> Token {
     let token = pair.as_rule();
     match token {
         Rule::token => {},
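
One detail of the new parse worth noting: lines.push(tokens) runs for every pair, including the final Rule::EOI, so the returned Vec<Vec<Token>> ends with an empty inner vector. If a caller does not want that, a small hypothetical caller-side helper (not part of this commit) can filter it out:

    // Hypothetical wrapper around renrs::parse that drops empty token lists,
    // e.g. the one pushed for the trailing EOI pair.
    fn parse_non_empty(script: &str) -> Vec<Vec<renrs::Token>> {
        renrs::parse(script)
            .into_iter()
            .filter(|tokens| !tokens.is_empty())
            .collect()
    }
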
@@ -15,42 +43,28 @@ fn describe_token(pair: pest::iterators::Pair<Rule>) {
     };
     let contents = pair.into_inner().next().unwrap();
     let contents_rule = contents.as_rule();
-    let str = match contents_rule {
+    match contents_rule {
         Rule::string => {
             let data = contents.into_inner().next().unwrap();
-            match data.as_rule() {
+            Token::String(match data.as_rule() {
                 Rule::single_quote_string_data => data.as_str().replace("\\'", "'"),
                 Rule::double_quote_string_data => data.as_str().replace("\\\"", "\""),
                 _ => unreachable!(),
-            }
+            })
         },
         Rule::array => {
-            println!("array: Start array");
             let mut array = Vec::new();
             for token in contents.into_inner() {
-                describe_token(token);
+                array.push(parse_token(token));
             }
-            "End array".to_string()
+            Token::Array(array)
         }
-        _ => contents.as_str().to_owned(),
-    };
-    println!("{:?}: {}", contents_rule, str);
+        Rule::keyword => Token::Keyword(contents.as_str().to_owned()),
+        __ => unreachable!(),
+    }
 }
-pub fn parse(file_path: &str) {
+
+pub fn parse_file(file_path: &str) -> Vec<Vec<Token>> {
     let unparsed_file = fs::read_to_string(file_path).expect("cannot find file");
-    let file = RpyParser::parse(Rule::file, &unparsed_file)
-        .expect("unsuccessful parse") // unwrap the parse result
-        .next().unwrap(); // get and unwrap the `file` rule; never fails
-    for line in file.into_inner() {
-        match line.as_rule() {
-            Rule::line => {
-                for token in line.into_inner() {
-                    describe_token(token);
-                }
-                println!()
-            },
-            Rule::EOI => (),
-            _ => unreachable!(),
-        }
-    }
+    parse(&unparsed_file)
 }
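
The escape handling in the string arm is easy to misread because the backslashes are escaped twice, once in the text being parsed and once in the Rust string literals. A standalone illustration of the two replace calls, with hypothetical values (not part of this commit):

    // Single-quoted data: the two-character sequence \' collapses to '.
    assert_eq!(r"it\'s".replace("\\'", "'"), "it's");
    // Double-quoted data: the two-character sequence \" collapses to ".
    assert_eq!(r#"a \"quote\""#.replace("\\\"", "\""), r#"a "quote""#);
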