Refactoring and cleanup for parser module

xenofem 2020-06-19 12:58:59 -04:00
parent c990de795e
commit 3beb298a6d
4 changed files with 65 additions and 31 deletions

View file

@@ -1,6 +1,6 @@
 [package]
 name = "purrchance"
-version = "0.2.0"
+version = "0.3.0"
 authors = ["xenofem <xenofem@xeno.science>"]
 description = "An unofficial Rust implementation of the Perchance grammar engine"
 repository = "https://git.xeno.science/xenofem/purrchance"

View file

@@ -14,7 +14,7 @@ fn main() {
     }
     let raw_grammar = read_to_string(&args[1]).unwrap();
-    let grammar = load_grammar(&raw_grammar);
+    let grammar = load_grammar(&raw_grammar).unwrap();
     if args.len() < 3 {
         eprintln!("{:?}", grammar);
         exit(0);

View file

@@ -115,7 +115,7 @@ mod tests {
     #[test]
     fn eval_loaded_grammar() {
-        let g = load_grammar("test\n foo\n");
+        let g = load_grammar("test\n foo\n").unwrap();
         let nt = Symbol::NonTerminal(String::from("test"));
         assert_eq!(nt.eval(&g), Some(String::from("foo")));
     }
@@ -125,7 +125,7 @@ mod tests {
         let g = load_grammar("// testing
 test
 foo // blah blah
-// isn't this fun?");
+// isn't this fun?").unwrap();
         let nt = Symbol::NonTerminal(String::from("test"));
         assert_eq!(nt.eval(&g), Some(String::from("foo")));
     }
@@ -135,7 +135,7 @@ test
         let g = load_grammar("// testing
 test
 foo ^100
-// isn't this fun?");
+// isn't this fun?").unwrap();
         let nt = Symbol::NonTerminal(String::from("test"));
         assert_eq!(nt.eval(&g), Some(String::from("foo")));
     }
@@ -146,11 +146,32 @@ test
 test
 foo ^1000000
 bar ^1/1000000
-");
+").unwrap();
         let nt = Symbol::NonTerminal(String::from("test"));
         assert_eq!(nt.eval(&g), Some(String::from("foo")));
     }
+    #[test]
+    fn eval_loaded_grammar_comments_fraction_weights_tabs2() {
+        let g = load_grammar("
+test
+bar ^1/1000000
+foo ^1000000
+").unwrap();
+        let nt = Symbol::NonTerminal(String::from("test"));
+        assert_eq!(nt.eval(&g), Some(String::from("foo")));
+    }
+    #[test]
+    fn eval_loaded_grammar_comments_fraction_weights3() {
+        let g = load_grammar("
+test
+bar ^1/1000000000
+foo ^1/2
+").unwrap();
+        let nt = Symbol::NonTerminal(String::from("test"));
+        assert_eq!(nt.eval(&g), Some(String::from("foo")));
+    }
     #[test]
     fn eval_loaded_grammar_multiple_lists() {
         let g = load_grammar("
@@ -159,7 +180,7 @@ test
 test1
 foo
-");
+").unwrap();
         let nt = Symbol::NonTerminal(String::from("test"));
         assert_eq!(nt.eval(&g), Some(String::from("foo")));
     }
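
An aside on the grammar format these tests exercise (not part of this diff): a Perchance-style list starts with the list name at the beginning of a line, each option below it is indented by a single space or tab, an optional ^ weight follows the option text (a float or a fraction, defaulting to 1.0), and // starts a comment. A made-up list in that format might look like:

color
 red
 blue ^2  // twice as likely as red
 gold ^1/100  // a fractional weight for a rare option

The new tests above check that an overwhelmingly weighted option is the one returned by eval, whether the weights are written as large floats or as tiny fractions, and that tab-indented options parse as well.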

View file

@@ -11,24 +11,7 @@ use nom::multi::*;
 use nom::number::complete::*;
 use nom::sequence::*;
-fn nonterminal(input: &str) -> IResult<&str, Symbol> {
-    map(delimited(tag("["), nonterminal_name, tag("]")), Symbol::NonTerminal)(input)
-}
-fn weight(input: &str) -> IResult<&str, f64> {
-    preceded(tag("^"), alt((double, rat)))(input)
-}
-fn comment(input: &str) -> IResult<&str, ()> {
-    map(tuple((tag("//"), is_not("\n"))), |_| ())(input)
-}
-fn whitespace(input: &str) -> IResult<&str, ()> {
-    map(take_while(|c| (c == ' ' || c == '\t')), |_| ())(input)
-}
-fn empty_line(input: &str) -> IResult<&str, ()> {
-    map(tuple((whitespace, opt(comment), tag("\n"))), |_| ())(input)
-}
-pub fn take_until_any<T, Input, Error: ParseError<Input>>(tags: Vec<T>) -> impl Fn(Input) -> IResult<Input, Input, Error>
+fn take_until_any<T, Input, Error: ParseError<Input>>(tags: Vec<T>) -> impl Fn(Input) -> IResult<Input, Input, Error>
 where
     Input: InputTake + FindSubstring<T>,
     T: InputLength + Clone,
@@ -59,6 +42,10 @@ fn nonterminal_name(input: &str) -> IResult<&str, String> {
     Ok((input, String::from(head.to_owned() + tail)))
 }
+fn nonterminal(input: &str) -> IResult<&str, Symbol> {
+    map(delimited(tag("["), nonterminal_name, tag("]")), Symbol::NonTerminal)(input)
+}
 fn rat(input: &str) -> IResult<&str, f64> {
     let (input, num) = double(input)?;
     let (input, _) = tag("/")(input)?;
@@ -67,17 +54,43 @@ fn rat(input: &str) -> IResult<&str, f64> {
     Ok((input, num / denom))
 }
+fn weight(input: &str) -> IResult<&str, f64> {
+    preceded(tag("^"), alt((rat, double)))(input)
+}
+fn comment(input: &str) -> IResult<&str, ()> {
+    map(tuple((tag("//"), is_not("\n"))), |_| ())(input)
+}
+fn whitespace(input: &str) -> IResult<&str, ()> {
+    map(take_while(|c| (c == ' ' || c == '\t')), |_| ())(input)
+}
+fn eol(input: &str) -> IResult<&str, ()> {
+    map(alt((tag("\n"), all_consuming(take(0usize)))), |_| ())(input)
+}
+fn empty_lines(input: &str) -> IResult<&str, ()> {
+    map(
+        tuple((
+            many0(tuple((whitespace, opt(comment), tag("\n")))),
+            opt(tuple((whitespace, opt(comment), eol))),
+        )),
+        |_| ()
+    )(input)
+}
 fn expr(input: &str) -> IResult<&str, (Expr, f64)> {
     let (input, (_, _, syms, _, weight, _, _, _, _)) = tuple((
-        many0(empty_line),
+        empty_lines,
         alt((tag(" "), tag("\t"))),
         many1(alt((terminal, nonterminal))),
         whitespace,
         opt(weight),
         whitespace,
         opt(comment),
-        tag("\n"),
-        many0(empty_line),
+        eol,
+        empty_lines,
     ))(input)?;
     Ok((input, (Expr(syms), weight.unwrap_or(1.0))))
@@ -85,7 +98,7 @@ fn expr(input: &str) -> IResult<&str, (Expr, f64)> {
 fn list(input: &str) -> IResult<&str, (String, List)> {
     let (input, (_, name, _, _, _, exprs)) = tuple((
-        many0(empty_line),
+        empty_lines,
         nonterminal_name,
         whitespace,
         opt(comment),
@@ -102,6 +115,6 @@ fn grammar(input: &str) -> IResult<&str, Grammar> {
     Ok((input, Grammar(HashMap::from_iter(lists.into_iter()))))
 }
-pub fn load_grammar(input: &str) -> Grammar {
-    grammar(input).unwrap().1
+pub fn load_grammar(input: &str) -> Result<Grammar, Err<(&str, ErrorKind)>> {
+    all_consuming(grammar)(input).map(|(_, g)| g)
 }
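
The last hunk changes the public API: load_grammar now returns a Result and runs the grammar parser under all_consuming, so a parse failure or trailing unparsed input becomes an error for the caller instead of a panic (or silently dropped input) inside the parser. A minimal sketch of how a caller might handle that Result, assuming the crate exports load_grammar and Symbol (with eval) the way the tests use them; the "color" grammar string is made up for illustration:

use purrchance::{load_grammar, Symbol};

fn main() {
    // Hypothetical grammar text: one list, "color", with a weighted option.
    let src = "color\n red\n blue ^2\n";
    match load_grammar(src) {
        // Parse succeeded: evaluate the "color" nonterminal, as the tests do.
        Ok(g) => {
            let nt = Symbol::NonTerminal(String::from("color"));
            println!("{:?}", nt.eval(&g)); // Some("red") or Some("blue")
        }
        // Parse failed (or input was not fully consumed): report the nom error.
        Err(e) => eprintln!("failed to parse grammar: {:?}", e),
    }
}

Callers that prefer the old panicking behaviour, like main.rs and the tests in this commit, simply append .unwrap() to the call.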