Convert parser tests to macros (#257)

Casey Rodarmor 2017-11-17 23:59:55 -08:00, committed by GitHub
parent 87b2130db0
commit bc79d16eac
2 changed files with 251 additions and 241 deletions
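
For orientation, here is a rough before/after sketch of the conversion this commit makes, reconstructed from the diff below (indentation and exact whitespace are approximations, not necessarily the repository's formatting). An error-case parser test that used to be a standalone #[test] function calling the parse_error helper becomes a single declarative error_test! invocation; the new macro expands to a test that tokenizes and parses the input and compares each field of the resulting CompilationError. Inline summary_test!{...} invocations are also reflowed so the test name and each argument get their own line, and a new parse_simple_shebang summary test replaces the inline parse_success check that previously lived inside extra_whitespace.

  // Before (sketch): one hand-written test per error case.
  #[test]
  fn missing_colon() {
    let text = "a b c\nd e f";
    parse_error(text, CompilationError {
      text: text,
      index: 5,
      line: 0,
      column: 5,
      width: Some(1),
      kind: UnexpectedToken{expected: vec![Name, Plus, Colon], found: Eol},
    });
  }

  // After (sketch): the same case as a declarative error_test! invocation.
  error_test! {
    name: missing_colon,
    input: "a b c\nd e f",
    index: 5,
    line: 0,
    column: 5,
    width: Some(1),
    kind: UnexpectedToken{expected: vec![Name, Plus, Colon], found: Eol},
  }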

File 1 of 2: parser test module

@@ -386,7 +386,7 @@ mod test {
   use super::*;
   use brev;
   use testing::parse_success;
-  use testing::parse_error;
+  use tokenize;

   macro_rules! summary_test {
     ($name:ident, $input:expr, $expected:expr $(,)*) => {
@@ -405,7 +405,49 @@ mod test {
     }
   }

-  summary_test!{parse_empty,
+  macro_rules! error_test {
+    (
+      name: $name:ident,
+      input: $input:expr,
+      index: $index:expr,
+      line: $line:expr,
+      column: $column:expr,
+      width: $width:expr,
+      kind: $kind:expr,
+    ) => {
+      #[test]
+      fn $name() {
+        let input = $input;
+        let expected = CompilationError {
+          text: input,
+          index: $index,
+          line: $line,
+          column: $column,
+          width: $width,
+          kind: $kind,
+        };
+        let tokens = tokenize(input).unwrap();
+        let parser = Parser::new(input, tokens);
+        if let Err(error) = parser.justfile() {
+          assert_eq!(error.text, expected.text);
+          assert_eq!(error.index, expected.index);
+          assert_eq!(error.line, expected.line);
+          assert_eq!(error.column, expected.column);
+          assert_eq!(error.width, expected.width);
+          assert_eq!(error.kind, expected.kind);
+          assert_eq!(error, expected);
+        } else {
+          panic!("parse succeeded but expected: {}\n{}", expected, input);
+        }
+      }
+    }
+  }
+
+  summary_test! {
+    parse_empty,
 "

 # hello
@@ -415,7 +457,8 @@ mod test {
     "",
   }

-  summary_test!{parse_string_default,
+  summary_test! {
+    parse_string_default,
 r#"
 foo a="b\t":
@@ -425,7 +468,8 @@ foo a="b\t":
     r#"foo a='b\t':"#,
   }

-  summary_test!{parse_variadic,
+  summary_test! {
+    parse_variadic,
 r#"
 foo +a:
@@ -435,7 +479,8 @@ foo +a:
     r#"foo +a:"#,
   }

-  summary_test!{parse_variadic_string_default,
+  summary_test! {
+    parse_variadic_string_default,
 r#"
 foo +a="Hello":
@@ -445,7 +490,8 @@ foo +a="Hello":
     r#"foo +a='Hello':"#,
   }

-  summary_test!{parse_raw_string_default,
+  summary_test! {
+    parse_raw_string_default,
 r#"
 foo a='b\t':
@@ -455,7 +501,8 @@ foo a='b\t':
     r#"foo a='b\\t':"#,
   }

-  summary_test!{parse_export,
+  summary_test! {
+    parse_export,
 r#"
 export a = "hello"
@@ -463,7 +510,8 @@ export a = "hello"
     r#"export a = "hello""#,
   }

-  summary_test!{parse_complex,
+  summary_test! {
+    parse_complex,
 "
 x:
 y:
@@ -500,7 +548,8 @@ y:
 z:"
   }

-  summary_test!{parse_shebang,
+  summary_test! {
+    parse_shebang,
 "
 practicum = 'hello'
 install:
@@ -518,7 +567,14 @@ install:
 fi",
   }

-  summary_test!{parse_assignments,
+  summary_test! {
+    parse_simple_shebang,
+    "a:\n #!\n print(1)",
+    "a:\n #!\n print(1)",
+  }
+
+  summary_test! {
+    parse_assignments,
 r#"a = "0"
 c = a + b + a + b
 b = "1"
@@ -530,7 +586,8 @@ b = "1"
 c = a + b + a + b"#,
   }

-  summary_test!{parse_assignment_backticks,
+  summary_test! {
+    parse_assignment_backticks,
 "a = `echo hello`
 c = a + b + a + b
 b = `echo goodbye`",
@@ -541,281 +598,218 @@ b = `echo goodbye`
 c = a + b + a + b",
   }

-  summary_test!{parse_interpolation_backticks,
+  summary_test! {
+    parse_interpolation_backticks,
 r#"a:
 echo {{ `echo hello` + "blarg" }} {{ `echo bob` }}"#,
 r#"a:
 echo {{`echo hello` + "blarg"}} {{`echo bob`}}"#,
   }

-  summary_test!{eof_test,
+  summary_test! {
+    eof_test,
     "x:\ny:\nz:\na b c: x y z",
     "a b c: x y z\n\nx:\n\ny:\n\nz:",
   }

-  summary_test!{string_quote_escape,
+  summary_test! {
+    string_quote_escape,
     r#"a = "hello\"""#,
     r#"a = "hello\"""#,
   }

-  summary_test!{string_escapes,
+  summary_test! {
+    string_escapes,
     r#"a = "\n\t\r\"\\""#,
     r#"a = "\n\t\r\"\\""#,
   }

-  summary_test!{parameters,
+  summary_test! {
+    parameters,
 "a b c:
 {{b}} {{c}}",
 "a b c:
 {{b}} {{c}}",
   }

-  #[test]
-  fn missing_colon() {
-    let text = "a b c\nd e f";
-    parse_error(text, CompilationError {
-      text: text,
-      index: 5,
-      line: 0,
-      column: 5,
-      width: Some(1),
-      kind: UnexpectedToken{expected: vec![Name, Plus, Colon], found: Eol},
-    });
+  error_test! {
+    name: missing_colon,
+    input: "a b c\nd e f",
+    index: 5,
+    line: 0,
+    column: 5,
+    width: Some(1),
+    kind: UnexpectedToken{expected: vec![Name, Plus, Colon], found: Eol},
   }

-  #[test]
-  fn missing_default_eol() {
-    let text = "hello arg=\n";
-    let expected = vec![StringToken, RawString];
-    let found = Eol;
-    parse_error(text, CompilationError {
-      text: text,
-      index: 10,
-      line: 0,
-      column: 10,
-      width: Some(1),
-      kind: UnexpectedToken{expected, found},
-    });
+  error_test! {
+    name: missing_default_eol,
+    input: "hello arg=\n",
+    index: 10,
+    line: 0,
+    column: 10,
+    width: Some(1),
+    kind: UnexpectedToken{expected: vec![StringToken, RawString], found: Eol},
   }

-  #[test]
-  fn missing_default_eof() {
-    let text = "hello arg=";
-    let expected = vec![StringToken, RawString];
-    let found = Eof;
-    parse_error(text, CompilationError {
-      text: text,
-      index: 10,
-      line: 0,
-      column: 10,
-      width: Some(0),
-      kind: UnexpectedToken{expected, found},
-    });
+  error_test! {
+    name: missing_default_eof,
+    input: "hello arg=",
+    index: 10,
+    line: 0,
+    column: 10,
+    width: Some(0),
+    kind: UnexpectedToken{expected: vec![StringToken, RawString], found: Eof},
   }

-  #[test]
-  fn missing_default_colon() {
-    let text = "hello arg=:";
-    let expected = vec![StringToken, RawString];
-    let found = Colon;
-    parse_error(text, CompilationError {
-      text: text,
-      index: 10,
-      line: 0,
-      column: 10,
-      width: Some(1),
-      kind: UnexpectedToken{expected, found},
-    });
+  error_test! {
+    name: missing_default_colon,
+    input: "hello arg=:",
+    index: 10,
+    line: 0,
+    column: 10,
+    width: Some(1),
+    kind: UnexpectedToken{expected: vec![StringToken, RawString], found: Colon},
   }

-  #[test]
-  fn missing_default_backtick() {
-    let text = "hello arg=`hello`";
-    let expected = vec![StringToken, RawString];
-    let found = Backtick;
-    parse_error(text, CompilationError {
-      text: text,
-      index: 10,
-      line: 0,
-      column: 10,
-      width: Some(7),
-      kind: UnexpectedToken{expected, found},
-    });
+  error_test! {
+    name: missing_default_backtick,
+    input: "hello arg=`hello`",
+    index: 10,
+    line: 0,
+    column: 10,
+    width: Some(7),
+    kind: UnexpectedToken{expected: vec![StringToken, RawString], found: Backtick},
   }

-  #[test]
-  fn parameter_after_variadic() {
-    let text = "foo +a bbb:";
-    parse_error(text, CompilationError {
-      text: text,
-      index: 7,
-      line: 0,
-      column: 7,
-      width: Some(3),
-      kind: ParameterFollowsVariadicParameter{parameter: "bbb"}
-    });
+  error_test! {
+    name: parameter_after_variadic,
+    input: "foo +a bbb:",
+    index: 7,
+    line: 0,
+    column: 7,
+    width: Some(3),
+    kind: ParameterFollowsVariadicParameter{parameter: "bbb"},
   }

-  #[test]
-  fn required_after_default() {
-    let text = "hello arg='foo' bar:";
-    parse_error(text, CompilationError {
-      text: text,
-      index: 16,
-      line: 0,
-      column: 16,
-      width: Some(3),
-      kind: RequiredParameterFollowsDefaultParameter{parameter: "bar"},
-    });
+  error_test! {
+    name: required_after_default,
+    input: "hello arg='foo' bar:",
+    index: 16,
+    line: 0,
+    column: 16,
+    width: Some(3),
+    kind: RequiredParameterFollowsDefaultParameter{parameter: "bar"},
   }

-  #[test]
-  fn missing_eol() {
-    let text = "a b c: z =";
-    parse_error(text, CompilationError {
-      text: text,
-      index: 9,
-      line: 0,
-      column: 9,
-      width: Some(1),
-      kind: UnexpectedToken{expected: vec![Name, Eol, Eof], found: Equals},
-    });
+  error_test! {
+    name: missing_eol,
+    input: "a b c: z =",
+    index: 9,
+    line: 0,
+    column: 9,
+    width: Some(1),
+    kind: UnexpectedToken{expected: vec![Name, Eol, Eof], found: Equals},
   }

-  #[test]
-  fn duplicate_parameter() {
-    let text = "a b b:";
-    parse_error(text, CompilationError {
-      text: text,
-      index: 4,
-      line: 0,
-      column: 4,
-      width: Some(1),
-      kind: DuplicateParameter{recipe: "a", parameter: "b"}
-    });
+  error_test! {
+    name: duplicate_parameter,
+    input: "a b b:",
+    index: 4,
+    line: 0,
+    column: 4,
+    width: Some(1),
+    kind: DuplicateParameter{recipe: "a", parameter: "b"},
   }

-  #[test]
-  fn parameter_shadows_varible() {
-    let text = "foo = \"h\"\na foo:";
-    parse_error(text, CompilationError {
-      text: text,
-      index: 12,
-      line: 1,
-      column: 2,
-      width: Some(3),
-      kind: ParameterShadowsVariable{parameter: "foo"}
-    });
+  error_test! {
+    name: parameter_shadows_varible,
+    input: "foo = \"h\"\na foo:",
+    index: 12,
+    line: 1,
+    column: 2,
+    width: Some(3),
+    kind: ParameterShadowsVariable{parameter: "foo"},
   }

-  #[test]
-  fn dependency_has_parameters() {
-    let text = "foo arg:\nb: foo";
-    parse_error(text, CompilationError {
-      text: text,
-      index: 12,
-      line: 1,
-      column: 3,
-      width: Some(3),
-      kind: DependencyHasParameters{recipe: "b", dependency: "foo"}
-    });
+  error_test! {
+    name: dependency_has_parameters,
+    input: "foo arg:\nb: foo",
+    index: 12,
+    line: 1,
+    column: 3,
+    width: Some(3),
+    kind: DependencyHasParameters{recipe: "b", dependency: "foo"},
   }

-  #[test]
-  fn duplicate_dependency() {
-    let text = "a b c: b c z z";
-    parse_error(text, CompilationError {
-      text: text,
-      index: 13,
-      line: 0,
-      column: 13,
-      width: Some(1),
-      kind: DuplicateDependency{recipe: "a", dependency: "z"}
-    });
+  error_test! {
+    name: duplicate_dependency,
+    input: "a b c: b c z z",
+    index: 13,
+    line: 0,
+    column: 13,
+    width: Some(1),
+    kind: DuplicateDependency{recipe: "a", dependency: "z"},
   }

-  #[test]
-  fn duplicate_recipe() {
-    let text = "a:\nb:\na:";
-    parse_error(text, CompilationError {
-      text: text,
-      index: 6,
-      line: 2,
-      column: 0,
-      width: Some(1),
-      kind: DuplicateRecipe{recipe: "a", first: 0}
-    });
+  error_test! {
+    name: duplicate_recipe,
+    input: "a:\nb:\na:",
+    index: 6,
+    line: 2,
+    column: 0,
+    width: Some(1),
+    kind: DuplicateRecipe{recipe: "a", first: 0},
   }

-  #[test]
-  fn duplicate_variable() {
-    let text = "a = \"0\"\na = \"0\"";
-    parse_error(text, CompilationError {
-      text: text,
-      index: 8,
-      line: 1,
-      column: 0,
-      width: Some(1),
-      kind: DuplicateVariable{variable: "a"}
-    });
+  error_test! {
+    name: duplicate_variable,
+    input: "a = \"0\"\na = \"0\"",
+    index: 8,
+    line: 1,
+    column: 0,
+    width: Some(1),
+    kind: DuplicateVariable{variable: "a"},
   }

-  #[test]
-  fn extra_whitespace() {
-    let text = "a:\n blah\n  blarg";
-    parse_error(text, CompilationError {
-      text: text,
-      index: 10,
-      line: 2,
-      column: 1,
-      width: Some(6),
-      kind: ExtraLeadingWhitespace
-    });
-    // extra whitespace is okay in a shebang recipe
-    parse_success("a:\n #!\n  print(1)");
+  error_test! {
+    name: extra_whitespace,
+    input: "a:\n blah\n  blarg",
+    index: 10,
+    line: 2,
+    column: 1,
+    width: Some(6),
+    kind: ExtraLeadingWhitespace,
   }

-  #[test]
-  fn interpolation_outside_of_recipe() {
-    let text = "{{";
-    let expected = vec![Name, At];
-    let found = InterpolationStart;
-    parse_error(text, CompilationError {
-      text: text,
-      index: 0,
-      line: 0,
-      column: 0,
-      width: Some(2),
-      kind: UnexpectedToken{expected, found},
-    });
+  error_test! {
+    name: interpolation_outside_of_recipe,
+    input: "{{",
+    index: 0,
+    line: 0,
+    column: 0,
+    width: Some(2),
+    kind: UnexpectedToken{expected: vec![Name, At], found: InterpolationStart},
   }

-  #[test]
-  fn unclosed_interpolation_delimiter() {
-    let text = "a:\n echo {{ foo";
-    let expected = vec![Plus, Eol, InterpolationEnd];
-    let found = Dedent;
-    parse_error(text, CompilationError {
-      text: text,
-      index: 15,
-      line: 1,
-      column: 12,
-      width: Some(0),
-      kind: UnexpectedToken{expected, found},
-    });
+  error_test! {
+    name: unclosed_interpolation_delimiter,
+    input: "a:\n echo {{ foo",
+    index: 15,
+    line: 1,
+    column: 12,
+    width: Some(0),
+    kind: UnexpectedToken{expected: vec![Plus, Eol, InterpolationEnd], found: Dedent},
   }

-  #[test]
-  fn plus_following_parameter() {
-    let text = "a b c+:";
-    parse_error(text, CompilationError {
-      text: text,
-      index: 5,
-      line: 0,
-      column: 5,
-      width: Some(1),
-      kind: UnexpectedToken{expected: vec![Name], found: Plus},
-    });
+  error_test! {
+    name: plus_following_parameter,
+    input: "a b c+:",
+    index: 5,
+    line: 0,
+    column: 5,
+    width: Some(1),
+    kind: UnexpectedToken{expected: vec![Name], found: Plus},
   }

   #[test]
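
The second changed file gives the tokenizer's summary tests the same treatment: each single-line summary_test!{...} invocation is reflowed so the name, input, and expected token summary sit on separate lines. A minimal sketch of that restyling, taken from the tokenize_comment case in the diff below (whitespace approximate):

  // Before (sketch): name, input, and expected summary on one line.
  summary_test!{tokenize_comment, "a:=#", "N:=#."}

  // After (sketch): one argument per line.
  summary_test! {
    tokenize_comment,
    "a:=#",
    "N:=#."
  }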

File 2 of 2: tokenizer test module

@@ -368,19 +368,22 @@ mod test {
     }
   }

-  summary_test!{tokenize_strings,
+  summary_test! {
+    tokenize_strings,
     r#"a = "'a'" + '"b"' + "'c'" + '"d"'#echo hello"#,
     r#"N="+'+"+'#."#,
   }

-  summary_test!{tokenize_recipe_interpolation_eol,
+  summary_test! {
+    tokenize_recipe_interpolation_eol,
 "foo: # some comment
 {{hello}}
 ",
     "N:#$>^{N}$<.",
   }

-  summary_test!{tokenize_recipe_interpolation_eof,
+  summary_test! {
+    tokenize_recipe_interpolation_eof,
 "foo: # more comments
 {{hello}}
 # another comment
@@ -388,17 +391,20 @@ mod test {
     "N:#$>^{N}$<#$.",
   }

-  summary_test!{tokenize_recipe_complex_interpolation_expression,
+  summary_test! {
+    tokenize_recipe_complex_interpolation_expression,
     "foo: #lol\n {{a + b + \"z\" + blarg}}",
     "N:#$>^{N+N+\"+N}<.",
   }

-  summary_test!{tokenize_recipe_multiple_interpolations,
+  summary_test! {
+    tokenize_recipe_multiple_interpolations,
     "foo:#ok\n {{a}}0{{b}}1{{c}}",
     "N:#$>^{N}_{N}_{N}<.",
   }

-  summary_test!{tokenize_junk,
+  summary_test! {
+    tokenize_junk,
 "bob
 hello blah blah blah : a b c #whatever
@@ -406,7 +412,8 @@ hello blah blah blah : a b c #whatever
     "N$$NNNN:NNN#$.",
   }

-  summary_test!{tokenize_empty_lines,
+  summary_test! {
+    tokenize_empty_lines,
 "
 # this does something
 hello:
@@ -422,7 +429,8 @@ hello:
     "$#$N:$>^_$^_$$^_$$^_$$<#$.",
   }

-  summary_test!{tokenize_comment_before_variable,
+  summary_test! {
+    tokenize_comment_before_variable,
 "
 #
 A='1'
@@ -432,17 +440,20 @@ echo:
     "$#$N='$N:$>^_{N}$<.",
   }

-  summary_test!{tokenize_interpolation_backticks,
+  summary_test! {
+    tokenize_interpolation_backticks,
     "hello:\n echo {{`echo hello` + `echo goodbye`}}",
     "N:$>^_{`+`}<.",
   }

-  summary_test!{tokenize_assignment_backticks,
+  summary_test! {
+    tokenize_assignment_backticks,
     "a = `echo hello` + `echo goodbye`",
     "N=`+`.",
   }

-  summary_test!{tokenize_multiple,
+  summary_test! {
+    tokenize_multiple,
 "
 hello:
 a
@@ -460,9 +471,14 @@ bob:
     "$N:$>^_$^_$$^_$$^_$$<#$N:$>^_$<.",
   }

-  summary_test!{tokenize_comment, "a:=#", "N:=#."}
+  summary_test! {
+    tokenize_comment,
+    "a:=#",
+    "N:=#."
+  }

-  summary_test!{tokenize_order,
+  summary_test! {
+    tokenize_order,
 r"
 b: a
 @mv a b