Reform Parser (#509)

Just's first parser performed both parsing, i.e. the transformation of a
token stream according to the language grammar, and a number of consistency
checks and analysis passes.

This made parsing and analysis quite complex, so this diff introduces a
new, much cleaner `Parser`, and moves existing analysis into a dedicated
`Analyzer`.
This commit is contained in:
Casey Rodarmor 2019-11-07 10:55:15 -08:00 committed by GitHub
parent 8e084a7042
commit b2285ce0e0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
46 changed files with 3142 additions and 1607 deletions

View File

@ -102,7 +102,7 @@ sloc:
! grep --color -En '.{101}' src/*.rs ! grep --color -En '.{101}' src/*.rs
replace FROM TO: replace FROM TO:
sd -i '{{FROM}}' '{{TO}}' src/*.rs sd '{{FROM}}' '{{TO}}' src/*.rs
test-quine: test-quine:
cargo run -- quine cargo run -- quine

View File

@ -1,15 +1,35 @@
use crate::common::*; use crate::common::*;
#[derive(Debug)] /// An alias, e.g. `name := target`
pub(crate) struct Alias<'a> { #[derive(Debug, PartialEq)]
pub(crate) name: &'a str, pub(crate) struct Alias<'src> {
pub(crate) target: &'a str, pub(crate) name: Name<'src>,
pub(crate) line_number: usize, pub(crate) target: Name<'src>,
pub(crate) private: bool, }
impl Alias<'_> {
/// An alias is private if its name begins with an underscore.
pub(crate) fn is_private(&self) -> bool {
self.name.lexeme().starts_with('_')
}
/// Line on which the alias's name token appears in the source.
pub(crate) fn line_number(&self) -> usize {
self.name.line
}
}
impl<'src> Keyed<'src> for Alias<'src> {
fn key(&self) -> &'src str {
self.name.lexeme()
}
} }
impl<'a> Display for Alias<'a> { impl<'a> Display for Alias<'a> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result { fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "alias {} := {}", self.name, self.target) write!(
f,
"alias {} := {}",
self.name.lexeme(),
self.target.lexeme()
)
} }
} }

View File

@ -7,20 +7,14 @@ where
{ {
aliases: &'b BTreeMap<&'a str, Alias<'a>>, aliases: &'b BTreeMap<&'a str, Alias<'a>>,
recipes: &'b BTreeMap<&'a str, Recipe<'a>>, recipes: &'b BTreeMap<&'a str, Recipe<'a>>,
alias_tokens: &'b BTreeMap<&'a str, Token<'a>>,
} }
impl<'a: 'b, 'b> AliasResolver<'a, 'b> { impl<'a: 'b, 'b> AliasResolver<'a, 'b> {
pub(crate) fn resolve_aliases( pub(crate) fn resolve_aliases(
aliases: &BTreeMap<&'a str, Alias<'a>>, aliases: &BTreeMap<&'a str, Alias<'a>>,
recipes: &BTreeMap<&'a str, Recipe<'a>>, recipes: &BTreeMap<&'a str, Recipe<'a>>,
alias_tokens: &BTreeMap<&'a str, Token<'a>>,
) -> CompilationResult<'a, ()> { ) -> CompilationResult<'a, ()> {
let resolver = AliasResolver { let resolver = AliasResolver { aliases, recipes };
aliases,
recipes,
alias_tokens,
};
resolver.resolve()?; resolver.resolve()?;
@ -36,20 +30,20 @@ impl<'a: 'b, 'b> AliasResolver<'a, 'b> {
} }
fn resolve_alias(&self, alias: &Alias<'a>) -> CompilationResult<'a, ()> { fn resolve_alias(&self, alias: &Alias<'a>) -> CompilationResult<'a, ()> {
let token = self.alias_tokens.get(&alias.name).unwrap(); let token = alias.name.token();
// Make sure the alias doesn't conflict with any recipe // Make sure the alias doesn't conflict with any recipe
if let Some(recipe) = self.recipes.get(alias.name) { if let Some(recipe) = self.recipes.get(alias.name.lexeme()) {
return Err(token.error(AliasShadowsRecipe { return Err(token.error(AliasShadowsRecipe {
alias: alias.name, alias: alias.name.lexeme(),
recipe_line: recipe.line_number, recipe_line: recipe.line_number(),
})); }));
} }
// Make sure the target recipe exists // Make sure the target recipe exists
if self.recipes.get(alias.target).is_none() { if self.recipes.get(alias.target.lexeme()).is_none() {
return Err(token.error(UnknownAliasTarget { return Err(token.error(UnknownAliasTarget {
alias: alias.name, alias: alias.name.lexeme(),
target: alias.target, target: alias.target.lexeme(),
})); }));
} }

300
src/analyzer.rs Normal file
View File

@ -0,0 +1,300 @@
use crate::common::*;
use CompilationErrorKind::*;
/// Performs the consistency checks and analysis passes that used to live
/// in the parser, turning a parsed `Module` into a `Justfile`.
/// Items are collected into a table per item kind as the module is walked.
pub(crate) struct Analyzer<'a> {
/// Recipes collected so far
recipes: Table<'a, Recipe<'a>>,
/// Assignments collected so far
assignments: Table<'a, Assignment<'a>>,
/// Aliases collected so far
aliases: Table<'a, Alias<'a>>,
}
impl<'a> Analyzer<'a> {
/// Analyze `module`, returning a `Justfile` or the first
/// compilation error encountered.
pub(crate) fn analyze(module: Module<'a>) -> CompilationResult<'a, Justfile> {
let analyzer = Analyzer::new();
analyzer.justfile(module)
}
/// Construct an `Analyzer` with empty item tables.
pub(crate) fn new() -> Analyzer<'a> {
Analyzer {
recipes: empty(),
assignments: empty(),
aliases: empty(),
}
}
/// Consume `self` and `module`, checking each item and assembling the
/// final `Justfile`.
pub(crate) fn justfile(mut self, module: Module<'a>) -> CompilationResult<'a, Justfile<'a>> {
// First pass: per-item checks (duplicates, parameter ordering, leading
// whitespace), filing each item into the table for its kind.
for item in module.items {
match item {
Item::Alias(alias) => {
self.analyze_alias(&alias)?;
self.aliases.insert(alias);
}
Item::Assignment(assignment) => {
self.analyze_assignment(&assignment)?;
self.assignments.insert(assignment);
}
Item::Recipe(recipe) => {
self.analyze_recipe(&recipe)?;
self.recipes.insert(recipe);
}
}
}
let recipes = self.recipes;
let assignments = self.assignments;
let aliases = self.aliases;
// Second pass: cross-item resolution, once all items are known.
AssignmentResolver::resolve_assignments(&assignments)?;
RecipeResolver::resolve_recipes(&recipes, &assignments)?;
for recipe in recipes.values() {
// A recipe parameter may not shadow a variable assignment.
for parameter in &recipe.parameters {
if assignments.contains_key(parameter.name.lexeme()) {
return Err(parameter.name.token().error(ParameterShadowsVariable {
parameter: parameter.name.lexeme(),
}));
}
}
// A dependency must refer to a recipe that takes no parameters.
for dependency in &recipe.dependencies {
if !recipes[dependency.lexeme()].parameters.is_empty() {
return Err(dependency.error(DependencyHasParameters {
recipe: recipe.name(),
dependency: dependency.lexeme(),
}));
}
}
}
AliasResolver::resolve_aliases(&aliases, &recipes)?;
Ok(Justfile {
warnings: module.warnings,
recipes,
assignments,
aliases,
})
}
/// Check a single recipe: no duplicate name, no duplicate parameters,
/// required parameters before defaulted ones, no duplicate dependencies,
/// and no extra leading whitespace in non-shebang recipe lines.
fn analyze_recipe(&self, recipe: &Recipe<'a>) -> CompilationResult<'a, ()> {
if let Some(original) = self.recipes.get(recipe.name.lexeme()) {
return Err(recipe.name.token().error(DuplicateRecipe {
recipe: original.name(),
first: original.line_number(),
}));
}
let mut parameters = BTreeSet::new();
// Tracks whether a parameter with a default value has been seen, since
// a required parameter may not follow a defaulted one.
let mut passed_default = false;
for parameter in &recipe.parameters {
if parameters.contains(parameter.name.lexeme()) {
return Err(parameter.name.token().error(DuplicateParameter {
recipe: recipe.name.lexeme(),
parameter: parameter.name.lexeme(),
}));
}
parameters.insert(parameter.name.lexeme());
if parameter.default.is_some() {
passed_default = true;
} else if passed_default {
return Err(
parameter
.name
.token()
.error(RequiredParameterFollowsDefaultParameter {
parameter: parameter.name.lexeme(),
}),
);
}
}
let mut dependencies = BTreeSet::new();
for dependency in &recipe.dependencies {
if dependencies.contains(dependency.lexeme()) {
return Err(dependency.token().error(DuplicateDependency {
recipe: recipe.name.lexeme(),
dependency: dependency.lexeme(),
}));
}
dependencies.insert(dependency.lexeme());
}
// For non-shebang recipes, a line that does not continue the previous
// line (via trailing `\`) may not start with extra leading whitespace.
let mut continued = false;
for line in &recipe.body {
if !recipe.shebang && !continued {
if let Some(Fragment::Text { token }) = line.fragments.first() {
let text = token.lexeme();
if text.starts_with(' ') || text.starts_with('\t') {
return Err(token.error(ExtraLeadingWhitespace));
}
}
}
continued = line.is_continuation();
}
Ok(())
}
/// Reject an assignment whose name duplicates an earlier assignment.
fn analyze_assignment(&self, assignment: &Assignment<'a>) -> CompilationResult<'a, ()> {
if self.assignments.contains_key(assignment.name.lexeme()) {
return Err(assignment.name.token().error(DuplicateVariable {
variable: assignment.name.lexeme(),
}));
}
Ok(())
}
/// Reject an alias whose name duplicates an earlier alias.
fn analyze_alias(&self, alias: &Alias<'a>) -> CompilationResult<'a, ()> {
let name = alias.name.lexeme();
if let Some(original) = self.aliases.get(name) {
return Err(alias.name.token().error(DuplicateAlias {
alias: name,
first: original.line_number(),
}));
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
// Each case feeds `input` to the compiler and asserts that analysis fails
// with the given error kind at the given position.
analysis_error! {
name: duplicate_alias,
input: "alias foo = bar\nalias foo = baz",
offset: 22,
line: 1,
column: 6,
width: 3,
kind: DuplicateAlias { alias: "foo", first: 0 },
}
analysis_error! {
name: unknown_alias_target,
input: "alias foo = bar\n",
offset: 6,
line: 0,
column: 6,
width: 3,
kind: UnknownAliasTarget {alias: "foo", target: "bar"},
}
analysis_error! {
name: alias_shadows_recipe_before,
input: "bar: \n echo bar\nalias foo = bar\nfoo:\n echo foo",
offset: 23,
line: 2,
column: 6,
width: 3,
kind: AliasShadowsRecipe {alias: "foo", recipe_line: 3},
}
analysis_error! {
name: alias_shadows_recipe_after,
input: "foo:\n echo foo\nalias foo = bar\nbar:\n echo bar",
offset: 22,
line: 2,
column: 6,
width: 3,
kind: AliasShadowsRecipe { alias: "foo", recipe_line: 0 },
}
analysis_error! {
name: required_after_default,
input: "hello arg='foo' bar:",
offset: 16,
line: 0,
column: 16,
width: 3,
kind: RequiredParameterFollowsDefaultParameter{parameter: "bar"},
}
analysis_error! {
name: duplicate_parameter,
input: "a b b:",
offset: 4,
line: 0,
column: 4,
width: 1,
kind: DuplicateParameter{recipe: "a", parameter: "b"},
}
analysis_error! {
name: duplicate_variadic_parameter,
input: "a b +b:",
offset: 5,
line: 0,
column: 5,
width: 1,
kind: DuplicateParameter{recipe: "a", parameter: "b"},
}
analysis_error! {
// Fixed typo in test name: was `parameter_shadows_varible`.
name: parameter_shadows_variable,
input: "foo = \"h\"\na foo:",
offset: 12,
line: 1,
column: 2,
width: 3,
kind: ParameterShadowsVariable{parameter: "foo"},
}
analysis_error! {
name: dependency_has_parameters,
input: "foo arg:\nb: foo",
offset: 12,
line: 1,
column: 3,
width: 3,
kind: DependencyHasParameters{recipe: "b", dependency: "foo"},
}
analysis_error! {
name: duplicate_dependency,
input: "a b c: b c z z",
offset: 13,
line: 0,
column: 13,
width: 1,
kind: DuplicateDependency{recipe: "a", dependency: "z"},
}
analysis_error! {
name: duplicate_recipe,
input: "a:\nb:\na:",
offset: 6,
line: 2,
column: 0,
width: 1,
kind: DuplicateRecipe{recipe: "a", first: 0},
}
analysis_error! {
name: duplicate_variable,
input: "a = \"0\"\na = \"0\"",
offset: 8,
line: 1,
column: 0,
width: 1,
kind: DuplicateVariable{variable: "a"},
}
analysis_error! {
name: extra_whitespace,
input: "a:\n blah\n blarg",
offset: 10,
line: 2,
column: 1,
width: 6,
kind: ExtraLeadingWhitespace,
}
}

18
src/assignment.rs Normal file
View File

@ -0,0 +1,18 @@
use crate::common::*;
/// An assignment, e.g. `foo := bar`
#[derive(Debug, PartialEq)]
pub(crate) struct Assignment<'src> {
/// Assignment was prefixed by the `export` keyword
pub(crate) export: bool,
/// Left-hand side of the assignment
pub(crate) name: Name<'src>,
/// Right-hand side of the assignment
pub(crate) expression: Expression<'src>,
}
/// Assignments are keyed by the lexeme of their left-hand-side name.
impl<'src> Keyed<'src> for Assignment<'src> {
fn key(&self) -> &'src str {
self.name.lexeme()
}
}

View File

@ -1,31 +1,29 @@
use crate::common::*; use crate::common::*;
pub(crate) struct AssignmentEvaluator<'a: 'b, 'b> { pub(crate) struct AssignmentEvaluator<'a: 'b, 'b> {
pub(crate) assignments: &'b BTreeMap<&'a str, Expression<'a>>, pub(crate) assignments: &'b BTreeMap<&'a str, Assignment<'a>>,
pub(crate) invocation_directory: &'b Result<PathBuf, String>, pub(crate) invocation_directory: &'b Result<PathBuf, String>,
pub(crate) dotenv: &'b BTreeMap<String, String>, pub(crate) dotenv: &'b BTreeMap<String, String>,
pub(crate) dry_run: bool, pub(crate) dry_run: bool,
pub(crate) evaluated: BTreeMap<&'a str, String>, pub(crate) evaluated: BTreeMap<&'a str, (bool, String)>,
pub(crate) exports: &'b BTreeSet<&'a str>,
pub(crate) overrides: &'b BTreeMap<&'b str, &'b str>, pub(crate) overrides: &'b BTreeMap<&'b str, &'b str>,
pub(crate) quiet: bool, pub(crate) quiet: bool,
pub(crate) scope: &'b BTreeMap<&'a str, String>, pub(crate) scope: &'b BTreeMap<&'a str, (bool, String)>,
pub(crate) shell: &'b str, pub(crate) shell: &'b str,
} }
impl<'a, 'b> AssignmentEvaluator<'a, 'b> { impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
pub(crate) fn evaluate_assignments( pub(crate) fn evaluate_assignments(
assignments: &BTreeMap<&'a str, Expression<'a>>, assignments: &BTreeMap<&'a str, Assignment<'a>>,
invocation_directory: &Result<PathBuf, String>, invocation_directory: &Result<PathBuf, String>,
dotenv: &'b BTreeMap<String, String>, dotenv: &'b BTreeMap<String, String>,
overrides: &BTreeMap<&str, &str>, overrides: &BTreeMap<&str, &str>,
quiet: bool, quiet: bool,
shell: &'a str, shell: &'a str,
dry_run: bool, dry_run: bool,
) -> RunResult<'a, BTreeMap<&'a str, String>> { ) -> RunResult<'a, BTreeMap<&'a str, (bool, String)>> {
let mut evaluator = AssignmentEvaluator { let mut evaluator = AssignmentEvaluator {
evaluated: empty(), evaluated: empty(),
exports: &empty(),
scope: &empty(), scope: &empty(),
assignments, assignments,
invocation_directory, invocation_directory,
@ -46,13 +44,13 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
pub(crate) fn evaluate_line( pub(crate) fn evaluate_line(
&mut self, &mut self,
line: &[Fragment<'a>], line: &[Fragment<'a>],
arguments: &BTreeMap<&str, Cow<str>>, arguments: &BTreeMap<&'a str, Cow<str>>,
) -> RunResult<'a, String> { ) -> RunResult<'a, String> {
let mut evaluated = String::new(); let mut evaluated = String::new();
for fragment in line { for fragment in line {
match *fragment { match fragment {
Fragment::Text { ref text } => evaluated += text.lexeme(), Fragment::Text { token } => evaluated += token.lexeme(),
Fragment::Expression { ref expression } => { Fragment::Interpolation { expression } => {
evaluated += &self.evaluate_expression(expression, arguments)?; evaluated += &self.evaluate_expression(expression, arguments)?;
} }
} }
@ -65,12 +63,14 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
return Ok(()); return Ok(());
} }
if let Some(expression) = self.assignments.get(name) { if let Some(assignment) = self.assignments.get(name) {
if let Some(value) = self.overrides.get(name) { if let Some(value) = self.overrides.get(name) {
self.evaluated.insert(name, value.to_string()); self
.evaluated
.insert(name, (assignment.export, value.to_string()));
} else { } else {
let value = self.evaluate_expression(expression, &empty())?; let value = self.evaluate_expression(&assignment.expression, &empty())?;
self.evaluated.insert(name, value); self.evaluated.insert(name, (assignment.export, value));
} }
} else { } else {
return Err(RuntimeError::Internal { return Err(RuntimeError::Internal {
@ -84,29 +84,29 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
pub(crate) fn evaluate_expression( pub(crate) fn evaluate_expression(
&mut self, &mut self,
expression: &Expression<'a>, expression: &Expression<'a>,
arguments: &BTreeMap<&str, Cow<str>>, arguments: &BTreeMap<&'a str, Cow<str>>,
) -> RunResult<'a, String> { ) -> RunResult<'a, String> {
match *expression { match expression {
Expression::Variable { name, .. } => { Expression::Variable { name, .. } => {
if self.evaluated.contains_key(name) { let variable = name.lexeme();
Ok(self.evaluated[name].clone()) if self.evaluated.contains_key(variable) {
} else if self.scope.contains_key(name) { Ok(self.evaluated[variable].1.clone())
Ok(self.scope[name].clone()) } else if self.scope.contains_key(variable) {
} else if self.assignments.contains_key(name) { Ok(self.scope[variable].1.clone())
self.evaluate_assignment(name)?; } else if self.assignments.contains_key(variable) {
Ok(self.evaluated[name].clone()) self.evaluate_assignment(variable)?;
} else if arguments.contains_key(name) { Ok(self.evaluated[variable].1.clone())
Ok(arguments[name].to_string()) } else if arguments.contains_key(variable) {
Ok(arguments[variable].to_string())
} else { } else {
Err(RuntimeError::Internal { Err(RuntimeError::Internal {
message: format!("attempted to evaluate undefined variable `{}`", name), message: format!("attempted to evaluate undefined variable `{}`", variable),
}) })
} }
} }
Expression::Call { Expression::Call {
name, function,
arguments: ref call_arguments, arguments: call_arguments,
ref token,
} => { } => {
let call_arguments = call_arguments let call_arguments = call_arguments
.iter() .iter()
@ -116,20 +116,20 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
invocation_directory: &self.invocation_directory, invocation_directory: &self.invocation_directory,
dotenv: self.dotenv, dotenv: self.dotenv,
}; };
Function::evaluate(token, name, &context, &call_arguments) Function::evaluate(*function, &context, &call_arguments)
} }
Expression::String { ref cooked_string } => Ok(cooked_string.cooked.to_string()), Expression::StringLiteral { string_literal } => Ok(string_literal.cooked.to_string()),
Expression::Backtick { raw, ref token } => { Expression::Backtick { contents, token } => {
if self.dry_run { if self.dry_run {
Ok(format!("`{}`", raw)) Ok(format!("`{}`", contents))
} else { } else {
Ok(self.run_backtick(self.dotenv, raw, token)?) Ok(self.run_backtick(self.dotenv, contents, token)?)
} }
} }
Expression::Concatination { ref lhs, ref rhs } => { Expression::Concatination { lhs, rhs } => {
Ok(self.evaluate_expression(lhs, arguments)? + &self.evaluate_expression(rhs, arguments)?) Ok(self.evaluate_expression(lhs, arguments)? + &self.evaluate_expression(rhs, arguments)?)
} }
Expression::Group { ref expression } => self.evaluate_expression(&expression, arguments), Expression::Group { contents } => self.evaluate_expression(contents, arguments),
} }
} }
@ -143,7 +143,7 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
cmd.arg("-cu").arg(raw); cmd.arg("-cu").arg(raw);
cmd.export_environment_variables(self.scope, dotenv, self.exports)?; cmd.export_environment_variables(self.scope, dotenv)?;
cmd.stdin(process::Stdio::inherit()); cmd.stdin(process::Stdio::inherit());
@ -163,13 +163,13 @@ impl<'a, 'b> AssignmentEvaluator<'a, 'b> {
} }
#[cfg(test)] #[cfg(test)]
mod test { mod tests {
use super::*; use super::*;
use crate::testing::parse; use crate::testing::compile;
#[test] #[test]
fn backtick_code() { fn backtick_code() {
match parse("a:\n echo {{`f() { return 100; }; f`}}") match compile("a:\n echo {{`f() { return 100; }; f`}}")
.run(&["a"], &Default::default()) .run(&["a"], &Default::default())
.unwrap_err() .unwrap_err()
{ {
@ -198,7 +198,7 @@ recipe:
..Default::default() ..Default::default()
}; };
match parse(text).run(&["recipe"], &config).unwrap_err() { match compile(text).run(&["recipe"], &config).unwrap_err() {
RuntimeError::Backtick { RuntimeError::Backtick {
token, token,
output_error: OutputError::Code(_), output_error: OutputError::Code(_),

View File

@ -3,8 +3,7 @@ use crate::common::*;
use CompilationErrorKind::*; use CompilationErrorKind::*;
pub(crate) struct AssignmentResolver<'a: 'b, 'b> { pub(crate) struct AssignmentResolver<'a: 'b, 'b> {
assignments: &'b BTreeMap<&'a str, Expression<'a>>, assignments: &'b BTreeMap<&'a str, Assignment<'a>>,
assignment_tokens: &'b BTreeMap<&'a str, Token<'a>>,
stack: Vec<&'a str>, stack: Vec<&'a str>,
seen: BTreeSet<&'a str>, seen: BTreeSet<&'a str>,
evaluated: BTreeSet<&'a str>, evaluated: BTreeSet<&'a str>,
@ -12,15 +11,13 @@ pub(crate) struct AssignmentResolver<'a: 'b, 'b> {
impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> { impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> {
pub(crate) fn resolve_assignments( pub(crate) fn resolve_assignments(
assignments: &BTreeMap<&'a str, Expression<'a>>, assignments: &BTreeMap<&'a str, Assignment<'a>>,
assignment_tokens: &BTreeMap<&'a str, Token<'a>>,
) -> CompilationResult<'a, ()> { ) -> CompilationResult<'a, ()> {
let mut resolver = AssignmentResolver { let mut resolver = AssignmentResolver {
stack: empty(), stack: empty(),
seen: empty(), seen: empty(),
evaluated: empty(), evaluated: empty(),
assignments, assignments,
assignment_tokens,
}; };
for name in assignments.keys() { for name in assignments.keys() {
@ -38,13 +35,13 @@ impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> {
self.seen.insert(name); self.seen.insert(name);
self.stack.push(name); self.stack.push(name);
if let Some(expression) = self.assignments.get(name) { if let Some(assignment) = self.assignments.get(name) {
self.resolve_expression(expression)?; self.resolve_expression(&assignment.expression)?;
self.evaluated.insert(name); self.evaluated.insert(name);
} else { } else {
let message = format!("attempted to resolve unknown assignment `{}`", name); let message = format!("attempted to resolve unknown assignment `{}`", name);
return Err(CompilationError { return Err(CompilationError {
text: "", src: "",
offset: 0, offset: 0,
line: 0, line: 0,
column: 0, column: 0,
@ -57,43 +54,43 @@ impl<'a: 'b, 'b> AssignmentResolver<'a, 'b> {
fn resolve_expression(&mut self, expression: &Expression<'a>) -> CompilationResult<'a, ()> { fn resolve_expression(&mut self, expression: &Expression<'a>) -> CompilationResult<'a, ()> {
match expression { match expression {
Expression::Variable { name, ref token } => { Expression::Variable { name } => {
if self.evaluated.contains(name) { let variable = name.lexeme();
if self.evaluated.contains(variable) {
return Ok(()); return Ok(());
} else if self.seen.contains(name) { } else if self.seen.contains(variable) {
let token = &self.assignment_tokens[name]; let token = self.assignments[variable].name.token();
self.stack.push(name); self.stack.push(variable);
return Err(token.error(CircularVariableDependency { return Err(token.error(CircularVariableDependency {
variable: name, variable: variable,
circle: self.stack.clone(), circle: self.stack.clone(),
})); }));
} else if self.assignments.contains_key(name) { } else if self.assignments.contains_key(variable) {
self.resolve_assignment(name)?; self.resolve_assignment(variable)?;
} else { } else {
return Err(token.error(UndefinedVariable { variable: name })); return Err(name.token().error(UndefinedVariable { variable }));
} }
} }
Expression::Call { Expression::Call {
ref token, function,
ref arguments, arguments,
.. } => Function::resolve(&function.token(), arguments.len())?,
} => Function::resolve(token, arguments.len())?, Expression::Concatination { lhs, rhs } => {
Expression::Concatination { ref lhs, ref rhs } => {
self.resolve_expression(lhs)?; self.resolve_expression(lhs)?;
self.resolve_expression(rhs)?; self.resolve_expression(rhs)?;
} }
Expression::String { .. } | Expression::Backtick { .. } => {} Expression::StringLiteral { .. } | Expression::Backtick { .. } => {}
Expression::Group { expression } => self.resolve_expression(expression)?, Expression::Group { contents } => self.resolve_expression(contents)?,
} }
Ok(()) Ok(())
} }
} }
#[cfg(test)] #[cfg(test)]
mod test { mod tests {
use super::*; use super::*;
error_test! { analysis_error! {
name: circular_variable_dependency, name: circular_variable_dependency,
input: "a = b\nb = a", input: "a = b\nb = a",
offset: 0, offset: 0,
@ -103,7 +100,7 @@ mod test {
kind: CircularVariableDependency{variable: "a", circle: vec!["a", "b", "a"]}, kind: CircularVariableDependency{variable: "a", circle: vec!["a", "b", "a"]},
} }
error_test! { analysis_error! {
name: self_variable_dependency, name: self_variable_dependency,
input: "a = a", input: "a = a",
offset: 0, offset: 0,
@ -113,7 +110,7 @@ mod test {
kind: CircularVariableDependency{variable: "a", circle: vec!["a", "a"]}, kind: CircularVariableDependency{variable: "a", circle: vec!["a", "a"]},
} }
error_test! { analysis_error! {
name: unknown_expression_variable, name: unknown_expression_variable,
input: "x = yy", input: "x = yy",
offset: 4, offset: 4,
@ -123,7 +120,7 @@ mod test {
kind: UndefinedVariable{variable: "yy"}, kind: UndefinedVariable{variable: "yy"},
} }
error_test! { analysis_error! {
name: unknown_function, name: unknown_function,
input: "a = foo()", input: "a = foo()",
offset: 4, offset: 4,
@ -132,5 +129,4 @@ mod test {
width: 3, width: 3,
kind: UnknownFunction{function: "foo"}, kind: UnknownFunction{function: "foo"},
} }
} }

View File

@ -3,31 +3,27 @@ use crate::common::*;
pub(crate) trait CommandExt { pub(crate) trait CommandExt {
fn export_environment_variables<'a>( fn export_environment_variables<'a>(
&mut self, &mut self,
scope: &BTreeMap<&'a str, String>, scope: &BTreeMap<&'a str, (bool, String)>,
dotenv: &BTreeMap<String, String>, dotenv: &BTreeMap<String, String>,
exports: &BTreeSet<&'a str>,
) -> RunResult<'a, ()>; ) -> RunResult<'a, ()>;
} }
impl CommandExt for Command { impl CommandExt for Command {
fn export_environment_variables<'a>( fn export_environment_variables<'a>(
&mut self, &mut self,
scope: &BTreeMap<&'a str, String>, scope: &BTreeMap<&'a str, (bool, String)>,
dotenv: &BTreeMap<String, String>, dotenv: &BTreeMap<String, String>,
exports: &BTreeSet<&'a str>,
) -> RunResult<'a, ()> { ) -> RunResult<'a, ()> {
for (name, value) in dotenv { for (name, value) in dotenv {
self.env(name, value); self.env(name, value);
} }
for name in exports {
if let Some(value) = scope.get(name) { for (name, (export, value)) in scope {
if *export {
self.env(name, value); self.env(name, value);
} else {
return Err(RuntimeError::Internal {
message: format!("scope does not contain exported variable `{}`", name),
});
} }
} }
Ok(()) Ok(())
} }
} }

View File

@ -7,8 +7,10 @@ pub(crate) use std::{
env, env,
ffi::OsStr, ffi::OsStr,
fmt::{self, Display, Formatter}, fmt::{self, Display, Formatter},
fs, io, iter, fs,
ops::{Range, RangeInclusive}, io::{self, Write},
iter::{self, FromIterator},
ops::{Deref, Range, RangeInclusive},
path::{Path, PathBuf}, path::{Path, PathBuf},
process::{self, Command}, process::{self, Command},
str::{self, Chars}, str::{self, Chars},
@ -23,7 +25,7 @@ pub(crate) use log::warn;
pub(crate) use unicode_width::UnicodeWidthChar; pub(crate) use unicode_width::UnicodeWidthChar;
// modules // modules
pub(crate) use crate::search; pub(crate) use crate::{keyword, search};
// modules used in tests // modules used in tests
#[cfg(test)] #[cfg(test)]
@ -35,39 +37,35 @@ pub(crate) use crate::{
write_message_context::write_message_context, write_message_context::write_message_context,
}; };
// structs and enums // traits
pub(crate) use crate::{ pub(crate) use crate::{
alias::Alias, alias_resolver::AliasResolver, assignment_evaluator::AssignmentEvaluator, command_ext::CommandExt, compilation_result_ext::CompilationResultExt, keyed::Keyed,
assignment_resolver::AssignmentResolver, color::Color, compilation_error::CompilationError, ordinal::Ordinal, platform_interface::PlatformInterface, range_ext::RangeExt,
compilation_error_kind::CompilationErrorKind, config::Config, config_error::ConfigError,
count::Count, enclosure::Enclosure, expression::Expression, fragment::Fragment,
function::Function, function_context::FunctionContext, functions::Functions,
interrupt_guard::InterruptGuard, interrupt_handler::InterruptHandler, justfile::Justfile,
lexer::Lexer, list::List, output_error::OutputError, parameter::Parameter, parser::Parser,
platform::Platform, position::Position, recipe::Recipe, recipe_context::RecipeContext,
recipe_resolver::RecipeResolver, runtime_error::RuntimeError, search_error::SearchError,
shebang::Shebang, show_whitespace::ShowWhitespace, state::State, string_literal::StringLiteral,
subcommand::Subcommand, token::Token, token_kind::TokenKind, use_color::UseColor,
variables::Variables, verbosity::Verbosity, warning::Warning,
}; };
// structs and enums
pub(crate) use crate::{
alias::Alias, alias_resolver::AliasResolver, analyzer::Analyzer, assignment::Assignment,
assignment_evaluator::AssignmentEvaluator, assignment_resolver::AssignmentResolver, color::Color,
compilation_error::CompilationError, compilation_error_kind::CompilationErrorKind,
compiler::Compiler, config::Config, config_error::ConfigError, count::Count,
enclosure::Enclosure, expression::Expression, fragment::Fragment, function::Function,
function_context::FunctionContext, functions::Functions, interrupt_guard::InterruptGuard,
interrupt_handler::InterruptHandler, item::Item, justfile::Justfile, lexer::Lexer, line::Line,
list::List, module::Module, name::Name, output_error::OutputError, parameter::Parameter,
parser::Parser, platform::Platform, position::Position, recipe::Recipe,
recipe_context::RecipeContext, recipe_resolver::RecipeResolver, runtime_error::RuntimeError,
search_error::SearchError, shebang::Shebang, show_whitespace::ShowWhitespace, state::State,
string_literal::StringLiteral, subcommand::Subcommand, table::Table, token::Token,
token_kind::TokenKind, use_color::UseColor, variables::Variables, verbosity::Verbosity,
warning::Warning,
};
// structs and enums used in tests
#[cfg(test)]
pub(crate) use crate::{node::Node, tree::Tree};
// type aliases
pub(crate) type CompilationResult<'a, T> = Result<T, CompilationError<'a>>; pub(crate) type CompilationResult<'a, T> = Result<T, CompilationError<'a>>;
pub(crate) type RunResult<'a, T> = Result<T, RuntimeError<'a>>;
pub(crate) type ConfigResult<T> = Result<T, ConfigError>; pub(crate) type ConfigResult<T> = Result<T, ConfigError>;
pub(crate) type RunResult<'a, T> = Result<T, RuntimeError<'a>>;
#[allow(unused_imports)]
pub(crate) use std::io::prelude::*;
#[allow(unused_imports)]
pub(crate) use crate::command_ext::CommandExt;
#[allow(unused_imports)]
pub(crate) use crate::range_ext::RangeExt;
#[allow(unused_imports)]
pub(crate) use crate::ordinal::Ordinal;
#[allow(unused_imports)]
pub(crate) use crate::platform_interface::PlatformInterface;

View File

@ -2,7 +2,7 @@ use crate::common::*;
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub(crate) struct CompilationError<'a> { pub(crate) struct CompilationError<'a> {
pub(crate) text: &'a str, pub(crate) src: &'a str,
pub(crate) offset: usize, pub(crate) offset: usize,
pub(crate) line: usize, pub(crate) line: usize,
pub(crate) column: usize, pub(crate) column: usize,
@ -213,7 +213,7 @@ impl<'a> Display for CompilationError<'a> {
write_message_context( write_message_context(
f, f,
Color::fmt(f).error(), Color::fmt(f).error(),
self.text, self.src,
self.offset, self.offset,
self.line, self.line,
self.column, self.column,

View File

@ -0,0 +1,23 @@
use crate::common::*;
/// Extension trait for `CompilationResult`, letting the parser widen the
/// set of expected token kinds reported by an `UnexpectedToken` error.
pub(crate) trait CompilationResultExt {
/// If `self` is an `UnexpectedToken` error, merge `kinds` into its
/// `expected` list; otherwise return `self` unchanged.
fn expected(self, kinds: &[TokenKind]) -> Self;
}
impl<'src, T> CompilationResultExt for CompilationResult<'src, T> {
  /// Merge `kinds` into the `expected` list of an `UnexpectedToken`
  /// error, keeping the list sorted and free of duplicates. Successful
  /// results and other error kinds pass through untouched.
  fn expected(mut self, kinds: &[TokenKind]) -> Self {
    if let Err(error) = &mut self {
      if let CompilationErrorKind::UnexpectedToken {
        ref mut expected, ..
      } = error.kind
      {
        expected.extend_from_slice(kinds);
        expected.sort();
        expected.dedup();
      }
    }
    self
  }
}

13
src/compiler.rs Normal file
View File

@ -0,0 +1,13 @@
use crate::common::*;
/// Unit struct grouping the compilation pipeline entry point.
pub(crate) struct Compiler;
impl Compiler {
  /// Compile `text` into a `Justfile`: lex the source into tokens, parse
  /// the tokens into a module AST, then analyze the AST. Any stage's
  /// error is propagated to the caller.
  pub(crate) fn compile(text: &str) -> CompilationResult<Justfile> {
    Analyzer::analyze(Parser::parse(&Lexer::lex(text)?)?)
  }
}

View File

@ -1,55 +1,60 @@
use crate::common::*; use crate::common::*;
/// An expression. Note that the Just language grammar has both an
/// `expression` production of additions (`a + b`) and values, and a
/// `value` production of all other value types (for example strings,
/// function calls, and parenthetical groups).
///
/// The parser parses both values and expressions into `Expression`s.
#[derive(PartialEq, Debug)] #[derive(PartialEq, Debug)]
pub(crate) enum Expression<'a> { pub(crate) enum Expression<'src> {
/// `contents`
Backtick { Backtick {
raw: &'a str, contents: &'src str,
token: Token<'a>, token: Token<'src>,
}, },
/// `name(arguments)`
Call { Call {
name: &'a str, function: Name<'src>,
token: Token<'a>, arguments: Vec<Expression<'src>>,
arguments: Vec<Expression<'a>>,
}, },
/// `lhs + rhs`
Concatination { Concatination {
lhs: Box<Expression<'a>>, lhs: Box<Expression<'src>>,
rhs: Box<Expression<'a>>, rhs: Box<Expression<'src>>,
}, },
String { /// `(contents)`
cooked_string: StringLiteral<'a>, Group { contents: Box<Expression<'src>> },
}, /// `"string_literal"` or `'string_literal'`
Variable { StringLiteral {
name: &'a str, string_literal: StringLiteral<'src>,
token: Token<'a>,
},
Group {
expression: Box<Expression<'a>>,
}, },
/// `variable`
Variable { name: Name<'src> },
} }
impl<'a> Expression<'a> { impl<'src> Expression<'src> {
pub(crate) fn variables(&'a self) -> Variables<'a> { pub(crate) fn variables<'expression>(&'expression self) -> Variables<'expression, 'src> {
Variables::new(self) Variables::new(self)
} }
pub(crate) fn functions(&'a self) -> Functions<'a> { pub(crate) fn functions<'expression>(&'expression self) -> Functions<'expression, 'src> {
Functions::new(self) Functions::new(self)
} }
} }
impl<'a> Display for Expression<'a> { impl<'src> Display for Expression<'src> {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
match *self { match self {
Expression::Backtick { raw, .. } => write!(f, "`{}`", raw)?, Expression::Backtick { contents, .. } => write!(f, "`{}`", contents)?,
Expression::Concatination { ref lhs, ref rhs } => write!(f, "{} + {}", lhs, rhs)?, Expression::Concatination { lhs, rhs } => write!(f, "{} + {}", lhs, rhs)?,
Expression::String { ref cooked_string } => write!(f, "{}", cooked_string)?, Expression::StringLiteral { string_literal } => write!(f, "{}", string_literal)?,
Expression::Variable { name, .. } => write!(f, "{}", name)?, Expression::Variable { name } => write!(f, "{}", name.lexeme())?,
Expression::Call { Expression::Call {
name, function,
ref arguments, arguments,
..
} => { } => {
write!(f, "{}(", name)?; write!(f, "{}(", function.lexeme())?;
for (i, argument) in arguments.iter().enumerate() { for (i, argument) in arguments.iter().enumerate() {
if i > 0 { if i > 0 {
write!(f, ", {}", argument)?; write!(f, ", {}", argument)?;
@ -59,7 +64,7 @@ impl<'a> Display for Expression<'a> {
} }
write!(f, ")")?; write!(f, ")")?;
} }
Expression::Group { ref expression } => write!(f, "({})", expression)?, Expression::Group { contents } => write!(f, "({})", contents)?,
} }
Ok(()) Ok(())
} }

View File

@ -1,16 +1,10 @@
use crate::common::*; use crate::common::*;
/// A line fragment consisting either of…
#[derive(PartialEq, Debug)] #[derive(PartialEq, Debug)]
pub(crate) enum Fragment<'a> { pub(crate) enum Fragment<'src> {
Text { text: Token<'a> }, /// …raw text…
Expression { expression: Expression<'a> }, Text { token: Token<'src> },
} /// …an interpolation containing `expression`.
Interpolation { expression: Expression<'src> },
impl<'a> Fragment<'a> {
pub(crate) fn continuation(&self) -> bool {
match *self {
Fragment::Text { ref text } => text.lexeme().ends_with('\\'),
_ => false,
}
}
} }

View File

@ -56,26 +56,26 @@ impl Function {
} }
pub(crate) fn evaluate<'a>( pub(crate) fn evaluate<'a>(
token: &Token<'a>, function_name: Name<'a>,
name: &'a str,
context: &FunctionContext, context: &FunctionContext,
arguments: &[String], arguments: &[String],
) -> RunResult<'a, String> { ) -> RunResult<'a, String> {
let name = function_name.lexeme();
if let Some(function) = FUNCTIONS.get(name) { if let Some(function) = FUNCTIONS.get(name) {
use self::Function::*; use self::Function::*;
let argc = arguments.len(); let argc = arguments.len();
match (function, argc) { match (function, argc) {
(&Nullary(f), 0) => f(context).map_err(|message| RuntimeError::FunctionCall { (&Nullary(f), 0) => f(context).map_err(|message| RuntimeError::FunctionCall {
token: token.clone(), function: function_name,
message, message,
}), }),
(&Unary(f), 1) => f(context, &arguments[0]).map_err(|message| RuntimeError::FunctionCall { (&Unary(f), 1) => f(context, &arguments[0]).map_err(|message| RuntimeError::FunctionCall {
token: token.clone(), function: function_name,
message, message,
}), }),
(&Binary(f), 2) => { (&Binary(f), 2) => {
f(context, &arguments[0], &arguments[1]).map_err(|message| RuntimeError::FunctionCall { f(context, &arguments[0], &arguments[1]).map_err(|message| RuntimeError::FunctionCall {
token: token.clone(), function: function_name,
message, message,
}) })
} }

View File

@ -1,34 +1,36 @@
use crate::common::*; use crate::common::*;
pub(crate) struct Functions<'a> { pub(crate) struct Functions<'expression, 'src> {
stack: Vec<&'a Expression<'a>>, stack: Vec<&'expression Expression<'src>>,
} }
impl<'a> Functions<'a> { impl<'expression, 'src> Functions<'expression, 'src> {
pub(crate) fn new(root: &'a Expression<'a>) -> Functions<'a> { pub(crate) fn new(root: &'expression Expression<'src>) -> Functions<'expression, 'src> {
Functions { stack: vec![root] } Functions { stack: vec![root] }
} }
} }
impl<'a> Iterator for Functions<'a> { impl<'expression, 'src> Iterator for Functions<'expression, 'src> {
type Item = (&'a Token<'a>, usize); type Item = (Token<'src>, usize);
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
match self.stack.pop() { match self.stack.pop() {
None None
| Some(Expression::String { .. }) | Some(Expression::StringLiteral { .. })
| Some(Expression::Backtick { .. }) | Some(Expression::Backtick { .. })
| Some(Expression::Variable { .. }) => None, | Some(Expression::Variable { .. }) => None,
Some(Expression::Call { Some(Expression::Call {
token, arguments, .. function,
}) => Some((token, arguments.len())), arguments,
..
}) => Some((function.token(), arguments.len())),
Some(Expression::Concatination { lhs, rhs }) => { Some(Expression::Concatination { lhs, rhs }) => {
self.stack.push(lhs); self.stack.push(lhs);
self.stack.push(rhs); self.stack.push(rhs);
self.next() self.next()
} }
Some(Expression::Group { expression }) => { Some(Expression::Group { contents }) => {
self.stack.push(expression); self.stack.push(contents);
self.next() self.next()
} }
} }

9
src/item.rs Normal file
View File

@ -0,0 +1,9 @@
use crate::common::*;
/// A single top-level item
#[derive(Debug)]
pub(crate) enum Item<'src> {
Alias(Alias<'src>),
Assignment(Assignment<'src>),
Recipe(Recipe<'src>),
}

View File

@ -1,11 +1,10 @@
use crate::common::*; use crate::common::*;
#[derive(Debug)] #[derive(Debug, PartialEq)]
pub(crate) struct Justfile<'a> { pub(crate) struct Justfile<'a> {
pub(crate) recipes: BTreeMap<&'a str, Recipe<'a>>, pub(crate) recipes: Table<'a, Recipe<'a>>,
pub(crate) assignments: BTreeMap<&'a str, Expression<'a>>, pub(crate) assignments: Table<'a, Assignment<'a>>,
pub(crate) exports: BTreeSet<&'a str>, pub(crate) aliases: Table<'a, Alias<'a>>,
pub(crate) aliases: BTreeMap<&'a str, Alias<'a>>,
pub(crate) warnings: Vec<Warning<'a>>, pub(crate) warnings: Vec<Warning<'a>>,
} }
@ -14,7 +13,7 @@ impl<'a> Justfile<'a> {
let mut first: Option<&Recipe> = None; let mut first: Option<&Recipe> = None;
for recipe in self.recipes.values() { for recipe in self.recipes.values() {
if let Some(first_recipe) = first { if let Some(first_recipe) = first {
if recipe.line_number < first_recipe.line_number { if recipe.line_number() < first_recipe.line_number() {
first = Some(recipe) first = Some(recipe)
} }
} else { } else {
@ -75,7 +74,7 @@ impl<'a> Justfile<'a> {
width = cmp::max(name.len(), width); width = cmp::max(name.len(), width);
} }
for (name, value) in scope { for (name, (_export, value)) in scope {
println!("{0:1$} := \"{2}\"", name, width, value); println!("{0:1$} := \"{2}\"", name, width, value);
} }
return Ok(()); return Ok(());
@ -94,7 +93,7 @@ impl<'a> Justfile<'a> {
let argument_count = cmp::min(tail.len(), recipe.max_arguments()); let argument_count = cmp::min(tail.len(), recipe.max_arguments());
if !argument_range.range_contains(&argument_count) { if !argument_range.range_contains(&argument_count) {
return Err(RuntimeError::ArgumentCountMismatch { return Err(RuntimeError::ArgumentCountMismatch {
recipe: recipe.name, recipe: recipe.name(),
parameters: recipe.parameters.iter().collect(), parameters: recipe.parameters.iter().collect(),
found: tail.len(), found: tail.len(),
min: recipe.min_arguments(), min: recipe.min_arguments(),
@ -140,7 +139,7 @@ impl<'a> Justfile<'a> {
if let Some(recipe) = self.recipes.get(name) { if let Some(recipe) = self.recipes.get(name) {
Some(recipe) Some(recipe)
} else if let Some(alias) = self.aliases.get(name) { } else if let Some(alias) = self.aliases.get(name) {
self.recipes.get(alias.target) self.recipes.get(alias.target.lexeme())
} else { } else {
None None
} }
@ -155,12 +154,13 @@ impl<'a> Justfile<'a> {
ran: &mut BTreeSet<&'a str>, ran: &mut BTreeSet<&'a str>,
) -> RunResult<()> { ) -> RunResult<()> {
for dependency_name in &recipe.dependencies { for dependency_name in &recipe.dependencies {
if !ran.contains(dependency_name) { let lexeme = dependency_name.lexeme();
self.run_recipe(context, &self.recipes[dependency_name], &[], dotenv, ran)?; if !ran.contains(lexeme) {
self.run_recipe(context, &self.recipes[lexeme], &[], dotenv, ran)?;
} }
} }
recipe.run(context, arguments, dotenv, &self.exports)?; recipe.run(context, arguments, dotenv)?;
ran.insert(recipe.name); ran.insert(recipe.name());
Ok(()) Ok(())
} }
} }
@ -168,11 +168,11 @@ impl<'a> Justfile<'a> {
impl<'a> Display for Justfile<'a> { impl<'a> Display for Justfile<'a> {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
let mut items = self.recipes.len() + self.assignments.len() + self.aliases.len(); let mut items = self.recipes.len() + self.assignments.len() + self.aliases.len();
for (name, expression) in &self.assignments { for (name, assignment) in &self.assignments {
if self.exports.contains(name) { if assignment.export {
write!(f, "export ")?; write!(f, "export ")?;
} }
write!(f, "{} := {}", name, expression)?; write!(f, "{} := {}", name, assignment.expression)?;
items -= 1; items -= 1;
if items != 0 { if items != 0 {
write!(f, "\n\n")?; write!(f, "\n\n")?;
@ -197,15 +197,15 @@ impl<'a> Display for Justfile<'a> {
} }
#[cfg(test)] #[cfg(test)]
mod test { mod tests {
use super::*; use super::*;
use crate::runtime_error::RuntimeError::*; use crate::runtime_error::RuntimeError::*;
use crate::testing::parse; use crate::testing::compile;
#[test] #[test]
fn unknown_recipes() { fn unknown_recipes() {
match parse("a:\nb:\nc:") match compile("a:\nb:\nc:")
.run(&["a", "x", "y", "z"], &Default::default()) .run(&["a", "x", "y", "z"], &Default::default())
.unwrap_err() .unwrap_err()
{ {
@ -238,7 +238,7 @@ a:
x x
"; ";
match parse(text).run(&["a"], &Default::default()).unwrap_err() { match compile(text).run(&["a"], &Default::default()).unwrap_err() {
Code { Code {
recipe, recipe,
line_number, line_number,
@ -254,7 +254,7 @@ a:
#[test] #[test]
fn code_error() { fn code_error() {
match parse("fail:\n @exit 100") match compile("fail:\n @exit 100")
.run(&["fail"], &Default::default()) .run(&["fail"], &Default::default())
.unwrap_err() .unwrap_err()
{ {
@ -277,7 +277,7 @@ a:
a return code: a return code:
@x() { {{return}} {{code + "0"}}; }; x"#; @x() { {{return}} {{code + "0"}}; }; x"#;
match parse(text) match compile(text)
.run(&["a", "return", "15"], &Default::default()) .run(&["a", "return", "15"], &Default::default())
.unwrap_err() .unwrap_err()
{ {
@ -296,7 +296,7 @@ a return code:
#[test] #[test]
fn missing_some_arguments() { fn missing_some_arguments() {
match parse("a b c d:") match compile("a b c d:")
.run(&["a", "b", "c"], &Default::default()) .run(&["a", "b", "c"], &Default::default())
.unwrap_err() .unwrap_err()
{ {
@ -307,7 +307,10 @@ a return code:
min, min,
max, max,
} => { } => {
let param_names = parameters.iter().map(|p| p.name).collect::<Vec<&str>>(); let param_names = parameters
.iter()
.map(|p| p.name.lexeme())
.collect::<Vec<&str>>();
assert_eq!(recipe, "a"); assert_eq!(recipe, "a");
assert_eq!(param_names, ["b", "c", "d"]); assert_eq!(param_names, ["b", "c", "d"]);
assert_eq!(found, 2); assert_eq!(found, 2);
@ -320,7 +323,7 @@ a return code:
#[test] #[test]
fn missing_some_arguments_variadic() { fn missing_some_arguments_variadic() {
match parse("a b c +d:") match compile("a b c +d:")
.run(&["a", "B", "C"], &Default::default()) .run(&["a", "B", "C"], &Default::default())
.unwrap_err() .unwrap_err()
{ {
@ -331,7 +334,10 @@ a return code:
min, min,
max, max,
} => { } => {
let param_names = parameters.iter().map(|p| p.name).collect::<Vec<&str>>(); let param_names = parameters
.iter()
.map(|p| p.name.lexeme())
.collect::<Vec<&str>>();
assert_eq!(recipe, "a"); assert_eq!(recipe, "a");
assert_eq!(param_names, ["b", "c", "d"]); assert_eq!(param_names, ["b", "c", "d"]);
assert_eq!(found, 2); assert_eq!(found, 2);
@ -344,7 +350,7 @@ a return code:
#[test] #[test]
fn missing_all_arguments() { fn missing_all_arguments() {
match parse("a b c d:\n echo {{b}}{{c}}{{d}}") match compile("a b c d:\n echo {{b}}{{c}}{{d}}")
.run(&["a"], &Default::default()) .run(&["a"], &Default::default())
.unwrap_err() .unwrap_err()
{ {
@ -355,7 +361,10 @@ a return code:
min, min,
max, max,
} => { } => {
let param_names = parameters.iter().map(|p| p.name).collect::<Vec<&str>>(); let param_names = parameters
.iter()
.map(|p| p.name.lexeme())
.collect::<Vec<&str>>();
assert_eq!(recipe, "a"); assert_eq!(recipe, "a");
assert_eq!(param_names, ["b", "c", "d"]); assert_eq!(param_names, ["b", "c", "d"]);
assert_eq!(found, 0); assert_eq!(found, 0);
@ -368,7 +377,7 @@ a return code:
#[test] #[test]
fn missing_some_defaults() { fn missing_some_defaults() {
match parse("a b c d='hello':") match compile("a b c d='hello':")
.run(&["a", "b"], &Default::default()) .run(&["a", "b"], &Default::default())
.unwrap_err() .unwrap_err()
{ {
@ -379,7 +388,10 @@ a return code:
min, min,
max, max,
} => { } => {
let param_names = parameters.iter().map(|p| p.name).collect::<Vec<&str>>(); let param_names = parameters
.iter()
.map(|p| p.name.lexeme())
.collect::<Vec<&str>>();
assert_eq!(recipe, "a"); assert_eq!(recipe, "a");
assert_eq!(param_names, ["b", "c", "d"]); assert_eq!(param_names, ["b", "c", "d"]);
assert_eq!(found, 1); assert_eq!(found, 1);
@ -392,7 +404,7 @@ a return code:
#[test] #[test]
fn missing_all_defaults() { fn missing_all_defaults() {
match parse("a b c='r' d='h':") match compile("a b c='r' d='h':")
.run(&["a"], &Default::default()) .run(&["a"], &Default::default())
.unwrap_err() .unwrap_err()
{ {
@ -403,7 +415,10 @@ a return code:
min, min,
max, max,
} => { } => {
let param_names = parameters.iter().map(|p| p.name).collect::<Vec<&str>>(); let param_names = parameters
.iter()
.map(|p| p.name.lexeme())
.collect::<Vec<&str>>();
assert_eq!(recipe, "a"); assert_eq!(recipe, "a");
assert_eq!(param_names, ["b", "c", "d"]); assert_eq!(param_names, ["b", "c", "d"]);
assert_eq!(found, 0); assert_eq!(found, 0);
@ -419,7 +434,7 @@ a return code:
let mut config: Config = Default::default(); let mut config: Config = Default::default();
config.overrides.insert("foo", "bar"); config.overrides.insert("foo", "bar");
config.overrides.insert("baz", "bob"); config.overrides.insert("baz", "bob");
match parse("a:\n echo {{`f() { return 100; }; f`}}") match compile("a:\n echo {{`f() { return 100; }; f`}}")
.run(&["a"], &config) .run(&["a"], &config)
.unwrap_err() .unwrap_err()
{ {
@ -447,7 +462,7 @@ wut:
..Default::default() ..Default::default()
}; };
match parse(text).run(&["wut"], &config).unwrap_err() { match compile(text).run(&["wut"], &config).unwrap_err() {
Code { Code {
code: _, code: _,
line_number, line_number,
@ -459,4 +474,369 @@ wut:
other => panic!("expected a recipe code errror, but got: {}", other), other => panic!("expected a recipe code errror, but got: {}", other),
} }
} }
macro_rules! test {
($name:ident, $input:expr, $expected:expr $(,)*) => {
#[test]
fn $name() {
test($input, $expected);
}
};
}
fn test(input: &str, expected: &str) {
let justfile = compile(input);
let actual = format!("{:#}", justfile);
assert_eq!(actual, expected);
println!("Re-parsing...");
let reparsed = compile(&actual);
let redumped = format!("{:#}", reparsed);
assert_eq!(redumped, actual);
}
test! {
parse_empty,
"
# hello
",
"",
}
test! {
parse_string_default,
r#"
foo a="b\t":
"#,
r#"foo a="b\t":"#,
}
test! {
parse_multiple,
r#"
a:
b:
"#,
r#"a:
b:"#,
}
test! {
parse_variadic,
r#"
foo +a:
"#,
r#"foo +a:"#,
}
test! {
parse_variadic_string_default,
r#"
foo +a="Hello":
"#,
r#"foo +a="Hello":"#,
}
test! {
parse_raw_string_default,
r#"
foo a='b\t':
"#,
r#"foo a='b\t':"#,
}
test! {
parse_export,
r#"
export a := "hello"
"#,
r#"export a := "hello""#,
}
test! {
parse_alias_after_target,
r#"
foo:
echo a
alias f := foo
"#,
r#"alias f := foo
foo:
echo a"#
}
test! {
parse_alias_before_target,
r#"
alias f := foo
foo:
echo a
"#,
r#"alias f := foo
foo:
echo a"#
}
test! {
parse_alias_with_comment,
r#"
alias f := foo #comment
foo:
echo a
"#,
r#"alias f := foo
foo:
echo a"#
}
test! {
parse_complex,
"
x:
y:
z:
foo := \"xx\"
bar := foo
goodbye := \"y\"
hello a b c : x y z #hello
#! blah
#blarg
{{ foo + bar}}abc{{ goodbye\t + \"x\" }}xyz
1
2
3
",
"bar := foo
foo := \"xx\"
goodbye := \"y\"
hello a b c: x y z
#! blah
#blarg
{{foo + bar}}abc{{goodbye + \"x\"}}xyz
1
2
3
x:
y:
z:"
}
test! {
parse_shebang,
"
practicum := 'hello'
install:
\t#!/bin/sh
\tif [[ -f {{practicum}} ]]; then
\t\treturn
\tfi
",
"practicum := 'hello'
install:
#!/bin/sh
if [[ -f {{practicum}} ]]; then
\treturn
fi",
}
test! {
parse_simple_shebang,
"a:\n #!\n print(1)",
"a:\n #!\n print(1)",
}
test! {
parse_assignments,
r#"a := "0"
c := a + b + a + b
b := "1"
"#,
r#"a := "0"
b := "1"
c := a + b + a + b"#,
}
test! {
parse_assignment_backticks,
"a := `echo hello`
c := a + b + a + b
b := `echo goodbye`",
"a := `echo hello`
b := `echo goodbye`
c := a + b + a + b",
}
test! {
parse_interpolation_backticks,
r#"a:
echo {{ `echo hello` + "blarg" }} {{ `echo bob` }}"#,
r#"a:
echo {{`echo hello` + "blarg"}} {{`echo bob`}}"#,
}
test! {
eof_test,
"x:\ny:\nz:\na b c: x y z",
"a b c: x y z\n\nx:\n\ny:\n\nz:",
}
test! {
string_quote_escape,
r#"a := "hello\"""#,
r#"a := "hello\"""#,
}
test! {
string_escapes,
r#"a := "\n\t\r\"\\""#,
r#"a := "\n\t\r\"\\""#,
}
test! {
parameters,
"a b c:
{{b}} {{c}}",
"a b c:
{{b}} {{c}}",
}
test! {
unary_functions,
"
x := arch()
a:
{{os()}} {{os_family()}}",
"x := arch()
a:
{{os()}} {{os_family()}}",
}
test! {
env_functions,
r#"
x := env_var('foo',)
a:
{{env_var_or_default('foo' + 'bar', 'baz',)}} {{env_var(env_var("baz"))}}"#,
r#"x := env_var('foo')
a:
{{env_var_or_default('foo' + 'bar', 'baz')}} {{env_var(env_var("baz"))}}"#,
}
test! {
parameter_default_string,
r#"
f x="abc":
"#,
r#"f x="abc":"#,
}
test! {
parameter_default_raw_string,
r#"
f x='abc':
"#,
r#"f x='abc':"#,
}
test! {
parameter_default_backtick,
r#"
f x=`echo hello`:
"#,
r#"f x=`echo hello`:"#,
}
test! {
parameter_default_concatination_string,
r#"
f x=(`echo hello` + "foo"):
"#,
r#"f x=(`echo hello` + "foo"):"#,
}
test! {
parameter_default_concatination_variable,
r#"
x := "10"
f y=(`echo hello` + x) +z="foo":
"#,
r#"x := "10"
f y=(`echo hello` + x) +z="foo":"#,
}
test! {
parameter_default_multiple,
r#"
x := "10"
f y=(`echo hello` + x) +z=("foo" + "bar"):
"#,
r#"x := "10"
f y=(`echo hello` + x) +z=("foo" + "bar"):"#,
}
test! {
concatination_in_group,
"x := ('0' + '1')",
"x := ('0' + '1')",
}
test! {
string_in_group,
"x := ('0' )",
"x := ('0')",
}
#[rustfmt::skip]
test! {
escaped_dos_newlines,
"@spam:\r
\t{ \\\r
\t\tfiglet test; \\\r
\t\tcargo build --color always 2>&1; \\\r
\t\tcargo test --color always -- --color always 2>&1; \\\r
\t} | less\r
",
"@spam:
{ \\
\tfiglet test; \\
\tcargo build --color always 2>&1; \\
\tcargo test --color always -- --color always 2>&1; \\
} | less",
}
} }

3
src/keyed.rs Normal file
View File

@ -0,0 +1,3 @@
pub(crate) trait Keyed<'key> {
fn key(&self) -> &'key str;
}

2
src/keyword.rs Normal file
View File

@ -0,0 +1,2 @@
pub(crate) const ALIAS: &str = "alias";
pub(crate) const EXPORT: &str = "export";

View File

@ -5,12 +5,15 @@ use TokenKind::*;
/// Just language lexer /// Just language lexer
/// ///
/// `self.next` points to the next character to be lexed, and /// The lexer proceeds character-by-character, as opposed to using
/// the text between `self.token_start` and `self.token_end` contains /// regular expressions to lex tokens or semi-tokens at a time. As a
/// the current token being lexed. /// result, it is verbose and straightforward. Just used to have a
/// regex-based lexer, which was slower and generally godawful. However,
/// this should not be taken as a slight against regular expressions,
/// the lexer was just idiosyncratically bad.
pub(crate) struct Lexer<'a> { pub(crate) struct Lexer<'a> {
/// Source text /// Source text
text: &'a str, src: &'a str,
/// Char iterator /// Char iterator
chars: Chars<'a>, chars: Chars<'a>,
/// Tokens /// Tokens
@ -21,19 +24,19 @@ pub(crate) struct Lexer<'a> {
token_start: Position, token_start: Position,
/// Current token end /// Current token end
token_end: Position, token_end: Position,
/// Next character /// Next character to be lexed
next: Option<char>, next: Option<char>,
} }
impl<'a> Lexer<'a> { impl<'a> Lexer<'a> {
/// Lex `text` /// Lex `text`
pub(crate) fn lex(text: &str) -> CompilationResult<Vec<Token>> { pub(crate) fn lex(src: &str) -> CompilationResult<Vec<Token>> {
Lexer::new(text).tokenize() Lexer::new(src).tokenize()
} }
/// Create a new Lexer to lex `text` /// Create a new Lexer to lex `text`
fn new(text: &'a str) -> Lexer<'a> { fn new(src: &'a str) -> Lexer<'a> {
let mut chars = text.chars(); let mut chars = src.chars();
let next = chars.next(); let next = chars.next();
let start = Position { let start = Position {
@ -49,7 +52,7 @@ impl<'a> Lexer<'a> {
token_end: start, token_end: start,
chars, chars,
next, next,
text, src,
} }
} }
@ -82,7 +85,7 @@ impl<'a> Lexer<'a> {
/// Lexeme of in-progress token /// Lexeme of in-progress token
fn lexeme(&self) -> &'a str { fn lexeme(&self) -> &'a str {
&self.text[self.token_start.offset..self.token_end.offset] &self.src[self.token_start.offset..self.token_end.offset]
} }
/// Length of current token /// Length of current token
@ -102,7 +105,7 @@ impl<'a> Lexer<'a> {
/// Un-lexed text /// Un-lexed text
fn rest(&self) -> &'a str { fn rest(&self) -> &'a str {
&self.text[self.token_end.offset..] &self.src[self.token_end.offset..]
} }
/// Check if unlexed text begins with prefix /// Check if unlexed text begins with prefix
@ -145,7 +148,7 @@ impl<'a> Lexer<'a> {
offset: self.token_start.offset, offset: self.token_start.offset,
column: self.token_start.column, column: self.token_start.column,
line: self.token_start.line, line: self.token_start.line,
text: self.text, src: self.src,
length: self.token_end.offset - self.token_start.offset, length: self.token_end.offset - self.token_start.offset,
kind, kind,
}); });
@ -158,7 +161,7 @@ impl<'a> Lexer<'a> {
fn internal_error(&self, message: impl Into<String>) -> CompilationError<'a> { fn internal_error(&self, message: impl Into<String>) -> CompilationError<'a> {
// Use `self.token_end` as the location of the error // Use `self.token_end` as the location of the error
CompilationError { CompilationError {
text: self.text, src: self.src,
offset: self.token_end.offset, offset: self.token_end.offset,
line: self.token_end.line, line: self.token_end.line,
column: self.token_end.column, column: self.token_end.column,
@ -184,7 +187,7 @@ impl<'a> Lexer<'a> {
}; };
CompilationError { CompilationError {
text: self.text, src: self.src,
offset: self.token_start.offset, offset: self.token_start.offset,
line: self.token_start.line, line: self.token_start.line,
column: self.token_start.column, column: self.token_start.column,
@ -198,7 +201,7 @@ impl<'a> Lexer<'a> {
interpolation_start: Position, interpolation_start: Position,
) -> CompilationError<'a> { ) -> CompilationError<'a> {
CompilationError { CompilationError {
text: self.text, src: self.src,
offset: interpolation_start.offset, offset: interpolation_start.offset,
line: interpolation_start.line, line: interpolation_start.line,
column: interpolation_start.column, column: interpolation_start.column,
@ -359,7 +362,7 @@ impl<'a> Lexer<'a> {
' ' | '\t' => self.lex_whitespace(), ' ' | '\t' => self.lex_whitespace(),
'\'' => self.lex_raw_string(), '\'' => self.lex_raw_string(),
'"' => self.lex_cooked_string(), '"' => self.lex_cooked_string(),
'a'..='z' | 'A'..='Z' | '_' => self.lex_name(), 'a'..='z' | 'A'..='Z' | '_' => self.lex_identifier(),
_ => { _ => {
self.advance()?; self.advance()?;
Err(self.error(UnknownStartOfToken)) Err(self.error(UnknownStartOfToken))
@ -446,7 +449,6 @@ impl<'a> Lexer<'a> {
/// Lex token beginning with `start` in indented state /// Lex token beginning with `start` in indented state
fn lex_indented(&mut self) -> CompilationResult<'a, ()> { fn lex_indented(&mut self) -> CompilationResult<'a, ()> {
self.state.push(State::Text); self.state.push(State::Text);
self.token(Line);
Ok(()) Ok(())
} }
@ -513,8 +515,8 @@ impl<'a> Lexer<'a> {
self.lex_double(Eol) self.lex_double(Eol)
} }
/// Lex name: [a-zA-Z_][a-zA-Z0-9_]* /// Lex identifier: [a-zA-Z_][a-zA-Z0-9_]*
fn lex_name(&mut self) -> CompilationResult<'a, ()> { fn lex_identifier(&mut self) -> CompilationResult<'a, ()> {
while self while self
.next .next
.map(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_') .map(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_')
@ -523,7 +525,7 @@ impl<'a> Lexer<'a> {
self.advance()?; self.advance()?;
} }
self.token(Name); self.token(Identifier);
Ok(()) Ok(())
} }
@ -725,19 +727,61 @@ mod tests {
Whitespace => " ", Whitespace => " ",
// Empty lexemes // Empty lexemes
Line | Dedent | Eof => "", Dedent | Eof => "",
// Variable lexemes // Variable lexemes
Text | StringCooked | StringRaw | Name | Comment | Backtick => { Text | StringCooked | StringRaw | Identifier | Comment | Backtick => {
panic!("Token {:?} has no default lexeme", kind) panic!("Token {:?} has no default lexeme", kind)
} }
} }
} }
macro_rules! error {
(
name: $name:ident,
input: $input:expr,
offset: $offset:expr,
line: $line:expr,
column: $column:expr,
width: $width:expr,
kind: $kind:expr,
) => {
#[test]
fn $name() {
error($input, $offset, $line, $column, $width, $kind);
}
};
}
fn error(
src: &str,
offset: usize,
line: usize,
column: usize,
width: usize,
kind: CompilationErrorKind,
) {
let expected = CompilationError {
src,
offset,
line,
column,
width,
kind,
};
match Lexer::lex(src) {
Ok(_) => panic!("Lexing succeeded but expected: {}\n{}", expected, src),
Err(actual) => {
assert_eq!(actual, expected);
}
}
}
test! { test! {
name: name_new, name: name_new,
text: "foo", text: "foo",
tokens: (Name:"foo"), tokens: (Identifier:"foo"),
} }
test! { test! {
@ -768,9 +812,9 @@ mod tests {
name: export_concatination, name: export_concatination,
text: "export foo = 'foo' + 'bar'", text: "export foo = 'foo' + 'bar'",
tokens: ( tokens: (
Name:"export", Identifier:"export",
Whitespace, Whitespace,
Name:"foo", Identifier:"foo",
Whitespace, Whitespace,
Equals, Equals,
Whitespace, Whitespace,
@ -786,9 +830,9 @@ mod tests {
name: export_complex, name: export_complex,
text: "export foo = ('foo' + 'bar') + `baz`", text: "export foo = ('foo' + 'bar') + `baz`",
tokens: ( tokens: (
Name:"export", Identifier:"export",
Whitespace, Whitespace,
Name:"foo", Identifier:"foo",
Whitespace, Whitespace,
Equals, Equals,
Whitespace, Whitespace,
@ -821,7 +865,7 @@ mod tests {
test! { test! {
name: indented_line, name: indented_line,
text: "foo:\n a", text: "foo:\n a",
tokens: (Name:"foo", Colon, Eol, Indent:" ", Line, Text:"a", Dedent), tokens: (Identifier:"foo", Colon, Eol, Indent:" ", Text:"a", Dedent),
} }
test! { test! {
@ -833,19 +877,16 @@ mod tests {
c c
", ",
tokens: ( tokens: (
Name:"foo", Identifier:"foo",
Colon, Colon,
Eol, Eol,
Indent, Indent,
Line,
Text:"a", Text:"a",
Eol, Eol,
Whitespace:" ", Whitespace:" ",
Line,
Text:"b", Text:"b",
Eol, Eol,
Whitespace:" ", Whitespace:" ",
Line,
Text:"c", Text:"c",
Eol, Eol,
Dedent, Dedent,
@ -860,15 +901,14 @@ mod tests {
b: b:
", ",
tokens: ( tokens: (
Name:"foo", Identifier:"foo",
Colon, Colon,
Eol, Eol,
Indent, Indent,
Line,
Text:"a", Text:"a",
Eol, Eol,
Dedent, Dedent,
Name:"b", Identifier:"b",
Colon, Colon,
Eol, Eol,
) )
@ -883,17 +923,15 @@ mod tests {
b: b:
", ",
tokens: ( tokens: (
Name:"foo", Identifier:"foo",
Colon, Colon,
Eol, Eol,
Indent:" ", Indent:" ",
Line,
Text:"a", Text:"a",
Eol, Eol,
Line,
Eol, Eol,
Dedent, Dedent,
Name:"b", Identifier:"b",
Colon, Colon,
Eol, Eol,
), ),
@ -903,11 +941,10 @@ mod tests {
name: indented_line_containing_unpaired_carriage_return, name: indented_line_containing_unpaired_carriage_return,
text: "foo:\n \r \n", text: "foo:\n \r \n",
tokens: ( tokens: (
Name:"foo", Identifier:"foo",
Colon, Colon,
Eol, Eol,
Indent:" ", Indent:" ",
Line,
Text:"\r ", Text:"\r ",
Eol, Eol,
Dedent, Dedent,
@ -931,51 +968,43 @@ mod tests {
@mv b c @mv b c
", ",
tokens: ( tokens: (
Name:"b", Identifier:"b",
Colon, Colon,
Whitespace, Whitespace,
Name:"a", Identifier:"a",
Eol, Eol,
Indent, Indent,
Line,
Text:"@mv a b", Text:"@mv a b",
Eol, Eol,
Line,
Eol, Eol,
Dedent, Dedent,
Name:"a", Identifier:"a",
Colon, Colon,
Eol, Eol,
Indent, Indent,
Line,
Text:"@touch F", Text:"@touch F",
Eol, Eol,
Whitespace:" ", Whitespace:" ",
Line,
Text:"@touch a", Text:"@touch a",
Eol, Eol,
Line,
Eol, Eol,
Dedent, Dedent,
Name:"d", Identifier:"d",
Colon, Colon,
Whitespace, Whitespace,
Name:"c", Identifier:"c",
Eol, Eol,
Indent, Indent,
Line,
Text:"@rm c", Text:"@rm c",
Eol, Eol,
Line,
Eol, Eol,
Dedent, Dedent,
Name:"c", Identifier:"c",
Colon, Colon,
Whitespace, Whitespace,
Name:"b", Identifier:"b",
Eol, Eol,
Indent, Indent,
Line,
Text:"@mv b c", Text:"@mv b c",
Eol, Eol,
Dedent Dedent
@ -986,11 +1015,10 @@ mod tests {
name: interpolation_empty, name: interpolation_empty,
text: "hello:\n echo {{}}", text: "hello:\n echo {{}}",
tokens: ( tokens: (
Name:"hello", Identifier:"hello",
Colon, Colon,
Eol, Eol,
Indent:" ", Indent:" ",
Line,
Text:"echo ", Text:"echo ",
InterpolationStart, InterpolationStart,
InterpolationEnd, InterpolationEnd,
@ -1002,11 +1030,10 @@ mod tests {
name: interpolation_expression, name: interpolation_expression,
text: "hello:\n echo {{`echo hello` + `echo goodbye`}}", text: "hello:\n echo {{`echo hello` + `echo goodbye`}}",
tokens: ( tokens: (
Name:"hello", Identifier:"hello",
Colon, Colon,
Eol, Eol,
Indent:" ", Indent:" ",
Line,
Text:"echo ", Text:"echo ",
InterpolationStart, InterpolationStart,
Backtick:"`echo hello`", Backtick:"`echo hello`",
@ -1028,13 +1055,13 @@ mod tests {
test123 test123
", ",
tokens: ( tokens: (
Name:"foo", Identifier:"foo",
Eol, Eol,
Name:"bar-bob", Identifier:"bar-bob",
Eol, Eol,
Name:"b-bob_asdfAAAA", Identifier:"b-bob_asdfAAAA",
Eol, Eol,
Name:"test123", Identifier:"test123",
Eol, Eol,
), ),
} }
@ -1043,11 +1070,10 @@ mod tests {
name: tokenize_indented_line, name: tokenize_indented_line,
text: "foo:\n a", text: "foo:\n a",
tokens: ( tokens: (
Name:"foo", Identifier:"foo",
Colon, Colon,
Eol, Eol,
Indent:" ", Indent:" ",
Line,
Text:"a", Text:"a",
Dedent, Dedent,
), ),
@ -1062,19 +1088,16 @@ mod tests {
c c
", ",
tokens: ( tokens: (
Name:"foo", Identifier:"foo",
Colon, Colon,
Eol, Eol,
Indent, Indent,
Line,
Text:"a", Text:"a",
Eol, Eol,
Whitespace:" ", Whitespace:" ",
Line,
Text:"b", Text:"b",
Eol, Eol,
Whitespace:" ", Whitespace:" ",
Line,
Text:"c", Text:"c",
Eol, Eol,
Dedent, Dedent,
@ -1085,7 +1108,7 @@ mod tests {
name: tokenize_strings, name: tokenize_strings,
text: r#"a = "'a'" + '"b"' + "'c'" + '"d"'#echo hello"#, text: r#"a = "'a'" + '"b"' + "'c'" + '"d"'#echo hello"#,
tokens: ( tokens: (
Name:"a", Identifier:"a",
Whitespace, Whitespace,
Equals, Equals,
Whitespace, Whitespace,
@ -1113,15 +1136,14 @@ mod tests {
{{hello}} {{hello}}
", ",
tokens: ( tokens: (
Name:"foo", Identifier:"foo",
Colon, Colon,
Whitespace, Whitespace,
Comment:"# some comment", Comment:"# some comment",
Eol, Eol,
Indent:" ", Indent:" ",
Line,
InterpolationStart, InterpolationStart,
Name:"hello", Identifier:"hello",
InterpolationEnd, InterpolationEnd,
Eol, Eol,
Dedent Dedent
@ -1135,15 +1157,14 @@ mod tests {
# another comment # another comment
", ",
tokens: ( tokens: (
Name:"foo", Identifier:"foo",
Colon, Colon,
Whitespace, Whitespace,
Comment:"# more comments", Comment:"# more comments",
Eol, Eol,
Indent:" ", Indent:" ",
Line,
InterpolationStart, InterpolationStart,
Name:"hello", Identifier:"hello",
InterpolationEnd, InterpolationEnd,
Eol, Eol,
Dedent, Dedent,
@ -1156,19 +1177,18 @@ mod tests {
name: tokenize_recipe_complex_interpolation_expression, name: tokenize_recipe_complex_interpolation_expression,
text: "foo: #lol\n {{a + b + \"z\" + blarg}}", text: "foo: #lol\n {{a + b + \"z\" + blarg}}",
tokens: ( tokens: (
Name:"foo", Identifier:"foo",
Colon, Colon,
Whitespace:" ", Whitespace:" ",
Comment:"#lol", Comment:"#lol",
Eol, Eol,
Indent:" ", Indent:" ",
Line,
InterpolationStart, InterpolationStart,
Name:"a", Identifier:"a",
Whitespace, Whitespace,
Plus, Plus,
Whitespace, Whitespace,
Name:"b", Identifier:"b",
Whitespace, Whitespace,
Plus, Plus,
Whitespace, Whitespace,
@ -1176,7 +1196,7 @@ mod tests {
Whitespace, Whitespace,
Plus, Plus,
Whitespace, Whitespace,
Name:"blarg", Identifier:"blarg",
InterpolationEnd, InterpolationEnd,
Dedent, Dedent,
), ),
@ -1186,23 +1206,22 @@ mod tests {
name: tokenize_recipe_multiple_interpolations, name: tokenize_recipe_multiple_interpolations,
text: "foo:,#ok\n {{a}}0{{b}}1{{c}}", text: "foo:,#ok\n {{a}}0{{b}}1{{c}}",
tokens: ( tokens: (
Name:"foo", Identifier:"foo",
Colon, Colon,
Comma, Comma,
Comment:"#ok", Comment:"#ok",
Eol, Eol,
Indent:" ", Indent:" ",
Line,
InterpolationStart, InterpolationStart,
Name:"a", Identifier:"a",
InterpolationEnd, InterpolationEnd,
Text:"0", Text:"0",
InterpolationStart, InterpolationStart,
Name:"b", Identifier:"b",
InterpolationEnd, InterpolationEnd,
Text:"1", Text:"1",
InterpolationStart, InterpolationStart,
Name:"c", Identifier:"c",
InterpolationEnd, InterpolationEnd,
Dedent, Dedent,
@ -1217,24 +1236,24 @@ mod tests {
hello blah blah blah : a b c #whatever hello blah blah blah : a b c #whatever
", ",
tokens: ( tokens: (
Name:"bob", Identifier:"bob",
Eol, Eol,
Eol, Eol,
Name:"hello", Identifier:"hello",
Whitespace, Whitespace,
Name:"blah", Identifier:"blah",
Whitespace, Whitespace,
Name:"blah", Identifier:"blah",
Whitespace, Whitespace,
Name:"blah", Identifier:"blah",
Whitespace, Whitespace,
Colon, Colon,
Whitespace, Whitespace,
Name:"a", Identifier:"a",
Whitespace, Whitespace,
Name:"b", Identifier:"b",
Whitespace, Whitespace,
Name:"c", Identifier:"c",
Whitespace, Whitespace,
Comment:"#whatever", Comment:"#whatever",
Eol, Eol,
@ -1260,30 +1279,23 @@ mod tests {
Eol, Eol,
Comment:"# this does something", Comment:"# this does something",
Eol, Eol,
Name:"hello", Identifier:"hello",
Colon, Colon,
Eol, Eol,
Indent, Indent,
Line,
Text:"asdf", Text:"asdf",
Eol, Eol,
Whitespace:" ", Whitespace:" ",
Line,
Text:"bsdf", Text:"bsdf",
Eol, Eol,
Line,
Eol, Eol,
Whitespace:" ", Whitespace:" ",
Line,
Text:"csdf", Text:"csdf",
Eol, Eol,
Line,
Eol, Eol,
Whitespace:" ", Whitespace:" ",
Line,
Text:"dsdf # whatever", Text:"dsdf # whatever",
Eol, Eol,
Line,
Eol, Eol,
Dedent, Dedent,
Comment:"# yolo", Comment:"# yolo",
@ -1302,18 +1314,17 @@ mod tests {
tokens: ( tokens: (
Comment:"#", Comment:"#",
Eol, Eol,
Name:"A", Identifier:"A",
Equals, Equals,
StringRaw:"'1'", StringRaw:"'1'",
Eol, Eol,
Name:"echo", Identifier:"echo",
Colon, Colon,
Eol, Eol,
Indent, Indent,
Line,
Text:"echo ", Text:"echo ",
InterpolationStart, InterpolationStart,
Name:"A", Identifier:"A",
InterpolationEnd, InterpolationEnd,
Eol, Eol,
Dedent, Dedent,
@ -1324,11 +1335,10 @@ mod tests {
name: tokenize_interpolation_backticks, name: tokenize_interpolation_backticks,
text: "hello:\n echo {{`echo hello` + `echo goodbye`}}", text: "hello:\n echo {{`echo hello` + `echo goodbye`}}",
tokens: ( tokens: (
Name:"hello", Identifier:"hello",
Colon, Colon,
Eol, Eol,
Indent:" ", Indent:" ",
Line,
Text:"echo ", Text:"echo ",
InterpolationStart, InterpolationStart,
Backtick:"`echo hello`", Backtick:"`echo hello`",
@ -1345,11 +1355,10 @@ mod tests {
name: tokenize_empty_interpolation, name: tokenize_empty_interpolation,
text: "hello:\n echo {{}}", text: "hello:\n echo {{}}",
tokens: ( tokens: (
Name:"hello", Identifier:"hello",
Colon, Colon,
Eol, Eol,
Indent:" ", Indent:" ",
Line,
Text:"echo ", Text:"echo ",
InterpolationStart, InterpolationStart,
InterpolationEnd, InterpolationEnd,
@ -1361,7 +1370,7 @@ mod tests {
name: tokenize_assignment_backticks, name: tokenize_assignment_backticks,
text: "a = `echo hello` + `echo goodbye`", text: "a = `echo hello` + `echo goodbye`",
tokens: ( tokens: (
Name:"a", Identifier:"a",
Whitespace, Whitespace,
Equals, Equals,
Whitespace, Whitespace,
@ -1392,42 +1401,33 @@ mod tests {
", ",
tokens: ( tokens: (
Eol, Eol,
Name:"hello", Identifier:"hello",
Colon, Colon,
Eol, Eol,
Indent, Indent,
Line,
Text:"a", Text:"a",
Eol, Eol,
Whitespace:" ", Whitespace:" ",
Line,
Text:"b", Text:"b",
Eol, Eol,
Line,
Eol, Eol,
Whitespace:" ", Whitespace:" ",
Line,
Text:"c", Text:"c",
Eol, Eol,
Line,
Eol, Eol,
Whitespace:" ", Whitespace:" ",
Line,
Text:"d", Text:"d",
Eol, Eol,
Line,
Eol, Eol,
Dedent, Dedent,
Comment:"# hello", Comment:"# hello",
Eol, Eol,
Name:"bob", Identifier:"bob",
Colon, Colon,
Eol, Eol,
Indent:" ", Indent:" ",
Line,
Text:"frank", Text:"frank",
Eol, Eol,
Line,
Eol, Eol,
Dedent, Dedent,
), ),
@ -1437,7 +1437,7 @@ mod tests {
name: tokenize_comment, name: tokenize_comment,
text: "a:=#", text: "a:=#",
tokens: ( tokens: (
Name:"a", Identifier:"a",
ColonEquals, ColonEquals,
Comment:"#", Comment:"#",
), ),
@ -1447,7 +1447,7 @@ mod tests {
name: tokenize_comment_with_bang, name: tokenize_comment_with_bang,
text: "a:=#foo!", text: "a:=#foo!",
tokens: ( tokens: (
Name:"a", Identifier:"a",
ColonEquals, ColonEquals,
Comment:"#foo!", Comment:"#foo!",
), ),
@ -1470,51 +1470,43 @@ mod tests {
@mv b c @mv b c
", ",
tokens: ( tokens: (
Name:"b", Identifier:"b",
Colon, Colon,
Whitespace, Whitespace,
Name:"a", Identifier:"a",
Eol, Eol,
Indent, Indent,
Line,
Text:"@mv a b", Text:"@mv a b",
Eol, Eol,
Line,
Eol, Eol,
Dedent, Dedent,
Name:"a", Identifier:"a",
Colon, Colon,
Eol, Eol,
Indent, Indent,
Line,
Text:"@touch F", Text:"@touch F",
Eol, Eol,
Whitespace:" ", Whitespace:" ",
Line,
Text:"@touch a", Text:"@touch a",
Eol, Eol,
Line,
Eol, Eol,
Dedent, Dedent,
Name:"d", Identifier:"d",
Colon, Colon,
Whitespace, Whitespace,
Name:"c", Identifier:"c",
Eol, Eol,
Indent, Indent,
Line,
Text:"@rm c", Text:"@rm c",
Eol, Eol,
Line,
Eol, Eol,
Dedent, Dedent,
Name:"c", Identifier:"c",
Colon, Colon,
Whitespace, Whitespace,
Name:"b", Identifier:"b",
Eol, Eol,
Indent, Indent,
Line,
Text:"@mv b c", Text:"@mv b c",
Eol, Eol,
Dedent, Dedent,
@ -1533,7 +1525,7 @@ mod tests {
ParenR, ParenR,
Whitespace, Whitespace,
ParenR, ParenR,
Name:"abc", Identifier:"abc",
ParenL, ParenL,
Plus, Plus,
), ),
@ -1554,20 +1546,19 @@ mod tests {
name: multiple_recipes, name: multiple_recipes,
text: "a:\n foo\nb:", text: "a:\n foo\nb:",
tokens: ( tokens: (
Name:"a", Identifier:"a",
Colon, Colon,
Eol, Eol,
Indent:" ", Indent:" ",
Line,
Text:"foo", Text:"foo",
Eol, Eol,
Dedent, Dedent,
Name:"b", Identifier:"b",
Colon, Colon,
), ),
} }
error_test! { error! {
name: tokenize_space_then_tab, name: tokenize_space_then_tab,
input: "a: input: "a:
0 0
@ -1581,7 +1572,7 @@ mod tests {
kind: InconsistentLeadingWhitespace{expected: " ", found: "\t"}, kind: InconsistentLeadingWhitespace{expected: " ", found: "\t"},
} }
error_test! { error! {
name: tokenize_tabs_then_tab_space, name: tokenize_tabs_then_tab_space,
input: "a: input: "a:
\t\t0 \t\t0
@ -1595,7 +1586,7 @@ mod tests {
kind: InconsistentLeadingWhitespace{expected: "\t\t", found: "\t "}, kind: InconsistentLeadingWhitespace{expected: "\t\t", found: "\t "},
} }
error_test! { error! {
name: tokenize_unknown, name: tokenize_unknown,
input: "~", input: "~",
offset: 0, offset: 0,
@ -1605,7 +1596,7 @@ mod tests {
kind: UnknownStartOfToken, kind: UnknownStartOfToken,
} }
error_test! { error! {
name: unterminated_string_with_escapes, name: unterminated_string_with_escapes,
input: r#"a = "\n\t\r\"\\"#, input: r#"a = "\n\t\r\"\\"#,
offset: 4, offset: 4,
@ -1615,7 +1606,7 @@ mod tests {
kind: UnterminatedString, kind: UnterminatedString,
} }
error_test! { error! {
name: unterminated_raw_string, name: unterminated_raw_string,
input: "r a='asdf", input: "r a='asdf",
offset: 4, offset: 4,
@ -1625,7 +1616,7 @@ mod tests {
kind: UnterminatedString, kind: UnterminatedString,
} }
error_test! { error! {
name: unterminated_interpolation, name: unterminated_interpolation,
input: "foo:\n echo {{ input: "foo:\n echo {{
", ",
@ -1636,7 +1627,7 @@ mod tests {
kind: UnterminatedInterpolation, kind: UnterminatedInterpolation,
} }
error_test! { error! {
name: unterminated_backtick, name: unterminated_backtick,
input: "`echo", input: "`echo",
offset: 0, offset: 0,
@ -1646,7 +1637,7 @@ mod tests {
kind: UnterminatedBacktick, kind: UnterminatedBacktick,
} }
error_test! { error! {
name: unpaired_carriage_return, name: unpaired_carriage_return,
input: "foo\rbar", input: "foo\rbar",
offset: 3, offset: 3,
@ -1656,7 +1647,7 @@ mod tests {
kind: UnpairedCarriageReturn, kind: UnpairedCarriageReturn,
} }
error_test! { error! {
name: unknown_start_of_token_ampersand, name: unknown_start_of_token_ampersand,
input: " \r\n&", input: " \r\n&",
offset: 3, offset: 3,
@ -1666,7 +1657,7 @@ mod tests {
kind: UnknownStartOfToken, kind: UnknownStartOfToken,
} }
error_test! { error! {
name: unknown_start_of_token_tilde, name: unknown_start_of_token_tilde,
input: "~", input: "~",
offset: 0, offset: 0,
@ -1676,7 +1667,7 @@ mod tests {
kind: UnknownStartOfToken, kind: UnknownStartOfToken,
} }
error_test! { error! {
name: unterminated_string, name: unterminated_string,
input: r#"a = ""#, input: r#"a = ""#,
offset: 4, offset: 4,
@ -1686,7 +1677,7 @@ mod tests {
kind: UnterminatedString, kind: UnterminatedString,
} }
error_test! { error! {
name: mixed_leading_whitespace, name: mixed_leading_whitespace,
input: "a:\n\t echo hello", input: "a:\n\t echo hello",
offset: 3, offset: 3,
@ -1696,7 +1687,7 @@ mod tests {
kind: MixedLeadingWhitespace{whitespace: "\t "}, kind: MixedLeadingWhitespace{whitespace: "\t "},
} }
error_test! { error! {
name: unclosed_interpolation_delimiter, name: unclosed_interpolation_delimiter,
input: "a:\n echo {{ foo", input: "a:\n echo {{ foo",
offset: 9, offset: 9,

View File

@ -3,7 +3,14 @@ extern crate lazy_static;
#[cfg(test)] #[cfg(test)]
#[macro_use] #[macro_use]
mod testing; pub mod testing;
#[cfg(test)]
#[macro_use]
pub mod tree;
#[cfg(test)]
pub mod node;
#[cfg(fuzzing)] #[cfg(fuzzing)]
pub(crate) mod fuzzing; pub(crate) mod fuzzing;
@ -13,6 +20,8 @@ mod die;
mod alias; mod alias;
mod alias_resolver; mod alias_resolver;
mod analyzer;
mod assignment;
mod assignment_evaluator; mod assignment_evaluator;
mod assignment_resolver; mod assignment_resolver;
mod color; mod color;
@ -20,6 +29,8 @@ mod command_ext;
mod common; mod common;
mod compilation_error; mod compilation_error;
mod compilation_error_kind; mod compilation_error_kind;
mod compilation_result_ext;
mod compiler;
mod config; mod config;
mod config_error; mod config_error;
mod count; mod count;
@ -33,10 +44,16 @@ mod function_context;
mod functions; mod functions;
mod interrupt_guard; mod interrupt_guard;
mod interrupt_handler; mod interrupt_handler;
mod item;
mod justfile; mod justfile;
mod keyed;
mod keyword;
mod lexer; mod lexer;
mod line;
mod list; mod list;
mod load_dotenv; mod load_dotenv;
mod module;
mod name;
mod ordinal; mod ordinal;
mod output; mod output;
mod output_error; mod output_error;
@ -58,6 +75,7 @@ mod show_whitespace;
mod state; mod state;
mod string_literal; mod string_literal;
mod subcommand; mod subcommand;
mod table;
mod token; mod token;
mod token_kind; mod token_kind;
mod use_color; mod use_color;

28
src/line.rs Normal file
View File

@ -0,0 +1,28 @@
use crate::common::*;
/// A single line in a recipe body, consisting of any number of
/// `Fragment`s.
#[derive(Debug, PartialEq)]
pub(crate) struct Line<'src> {
  /// The fragments that make up the line, in source order
  pub(crate) fragments: Vec<Fragment<'src>>,
}
impl<'src> Line<'src> {
  /// True if this line contains no fragments at all
  pub(crate) fn is_empty(&self) -> bool {
    self.fragments.is_empty()
  }

  /// True if this line's final fragment is text ending in `\`,
  /// i.e. the line continues onto the next one
  pub(crate) fn is_continuation(&self) -> bool {
    if let Some(Fragment::Text { token }) = self.fragments.last() {
      token.lexeme().ends_with('\\')
    } else {
      false
    }
  }

  /// True if this line's first fragment is text starting with `#!`
  pub(crate) fn is_shebang(&self) -> bool {
    if let Some(Fragment::Text { token }) = self.fragments.first() {
      token.lexeme().starts_with("#!")
    } else {
      false
    }
  }
}

16
src/module.rs Normal file
View File

@ -0,0 +1,16 @@
use crate::common::*;
/// A module, the top-level type produced by the parser. So-named because
/// although at present, all justfiles consist of a single module, in the
/// future we will likely have multi-module and multi-file justfiles.
///
/// Not all successful parses result in valid justfiles, so additional
/// consistency checks and name resolution are performed by the `Analyzer`,
/// which produces a `Justfile` from a `Module`.
#[derive(Debug)]
pub(crate) struct Module<'src> {
  /// Items (aliases, assignments, and recipes) in the justfile
  pub(crate) items: Vec<Item<'src>>,
  /// Non-fatal warnings encountered during parsing
  pub(crate) warnings: Vec<Warning<'src>>,
}

52
src/name.rs Normal file
View File

@ -0,0 +1,52 @@
use crate::common::*;
/// A name. This is effectively just a `Token` of kind `Identifier`, but we
/// give it its own type for clarity.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd)]
pub(crate) struct Name<'src> {
  /// Byte offset of the name within `src`
  pub(crate) offset: usize,
  /// Length of the name in bytes
  pub(crate) length: usize,
  /// Line on which the name appears
  pub(crate) line: usize,
  /// Column at which the name begins
  pub(crate) column: usize,
  /// The full source text the name was lexed from
  pub(crate) src: &'src str,
}
impl<'src> Name<'src> {
  /// Construct a `Name` from a token of kind `Identifier`.
  ///
  /// Panics if `token` is of any other kind.
  pub(crate) fn from_identifier(token: Token<'src>) -> Name<'src> {
    assert_eq!(token.kind, TokenKind::Identifier);
    Name {
      offset: token.offset,
      length: token.length,
      line: token.line,
      column: token.column,
      src: token.src,
    }
  }

  /// The name's text contents
  pub(crate) fn lexeme(&self) -> &'src str {
    let start = self.offset;
    let end = start + self.length;
    &self.src[start..end]
  }

  /// Turn this name back into a token of kind `Identifier`
  pub(crate) fn token(&self) -> Token<'src> {
    Token {
      kind: TokenKind::Identifier,
      offset: self.offset,
      length: self.length,
      line: self.line,
      column: self.column,
      src: self.src,
    }
  }

  /// Construct a compilation error of kind `kind` located at this name
  pub(crate) fn error(&self, kind: CompilationErrorKind<'src>) -> CompilationError<'src> {
    self.token().error(kind)
  }
}
impl Display for Name<'_> {
  /// Display a name as its bare lexeme
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    f.write_str(self.lexeme())
  }
}

150
src/node.rs Normal file
View File

@ -0,0 +1,150 @@
use crate::common::*;
/// Methods common to all AST nodes. Currently only used in parser unit tests.
pub(crate) trait Node<'src> {
  /// Construct an untyped tree of atoms representing this node. This
  /// function, and the `Tree` type, are only used in parser unit tests.
  fn tree(&self) -> Tree<'src>;
}
impl<'src> Node<'src> for Module<'src> {
  /// A module is rendered as a `justfile` atom followed by its items,
  /// then its warnings.
  fn tree(&self) -> Tree<'src> {
    let item_trees = self.items.iter().map(|item| item.tree());
    let warning_trees = self.warnings.iter().map(|warning| warning.tree());
    Tree::atom("justfile")
      .extend(item_trees)
      .extend(warning_trees)
  }
}
impl<'src> Node<'src> for Item<'src> {
  /// An item simply delegates to the tree of whichever variant it holds
  fn tree(&self) -> Tree<'src> {
    match self {
      Item::Alias(inner) => inner.tree(),
      Item::Assignment(inner) => inner.tree(),
      Item::Recipe(inner) => inner.tree(),
    }
  }
}
impl<'src> Node<'src> for Alias<'src> {
  /// An alias is rendered as `(alias NAME TARGET)`
  fn tree(&self) -> Tree<'src> {
    let tree = Tree::atom(keyword::ALIAS);
    let tree = tree.push(self.name.lexeme());
    tree.push(self.target.lexeme())
  }
}
impl<'src> Node<'src> for Assignment<'src> {
  /// An assignment is rendered as `(assignment [# export] NAME EXPRESSION)`
  fn tree(&self) -> Tree<'src> {
    let base = if self.export {
      Tree::atom("assignment").push("#").push(keyword::EXPORT)
    } else {
      Tree::atom("assignment")
    };
    base.push(self.name.lexeme()).push(self.expression.tree())
  }
}
impl<'src> Node<'src> for Expression<'src> {
  /// Each expression variant is rendered as its own atom shape
  fn tree(&self) -> Tree<'src> {
    match self {
      Expression::Concatination { lhs, rhs } => {
        Tree::atom("+").push(lhs.tree()).push(rhs.tree())
      }
      Expression::Call {
        function,
        arguments,
      } => {
        let argument_trees = arguments.iter().map(|argument| argument.tree());
        Tree::atom("call")
          .push(function.lexeme())
          .extend(argument_trees)
      }
      Expression::Variable { name } => Tree::atom(name.lexeme()),
      Expression::StringLiteral {
        string_literal: StringLiteral { cooked, .. },
      } => Tree::string(cooked),
      Expression::Backtick { contents, .. } => {
        Tree::atom("backtick").push(Tree::string(contents))
      }
      Expression::Group { contents } => Tree::List(vec![contents.tree()]),
    }
  }
}
impl<'src> Node<'src> for Recipe<'src> {
  /// A recipe is rendered as a `recipe` atom followed by its quiet flag,
  /// doc string, name, parameters, dependencies, and body, in that order,
  /// omitting whichever parts are absent.
  fn tree(&self) -> Tree<'src> {
    let mut tree = Tree::atom("recipe");

    if self.quiet {
      tree.push_mut("#");
      tree.push_mut("quiet");
    }

    if let Some(doc) = self.doc {
      tree.push_mut(Tree::string(doc));
    }

    tree.push_mut(self.name.lexeme());

    if !self.parameters.is_empty() {
      let mut params = Tree::atom("params");
      for parameter in &self.parameters {
        // Mark variadic parameters with a leading `+`
        if parameter.variadic {
          params.push_mut("+");
        }
        params.push_mut(parameter.tree());
      }
      tree.push_mut(params);
    }

    if !self.dependencies.is_empty() {
      let dependency_names = self
        .dependencies
        .iter()
        .map(|dependency| dependency.lexeme());
      tree.push_mut(Tree::atom("deps").extend(dependency_names));
    }

    if !self.body.is_empty() {
      let line_trees = self.body.iter().map(|line| line.tree());
      tree.push_mut(Tree::atom("body").extend(line_trees));
    }

    tree
  }
}
impl<'src> Node<'src> for Parameter<'src> {
  /// A parameter is rendered as its name, followed by its default
  /// expression if it has one
  fn tree(&self) -> Tree<'src> {
    let mut children = vec![Tree::atom(self.name.lexeme())];
    if let Some(default) = &self.default {
      children.push(default.tree());
    }
    Tree::List(children)
  }
}
impl<'src> Node<'src> for Line<'src> {
  /// A line is rendered as the list of its fragments' trees
  fn tree(&self) -> Tree<'src> {
    let fragment_trees = self.fragments.iter().map(|fragment| fragment.tree());
    Tree::list(fragment_trees)
  }
}
impl<'src> Node<'src> for Fragment<'src> {
  /// Text fragments render as strings; interpolations render as a
  /// single-element list wrapping the interpolated expression
  fn tree(&self) -> Tree<'src> {
    match self {
      Fragment::Interpolation { expression } => Tree::List(vec![expression.tree()]),
      Fragment::Text { token } => Tree::string(token.lexeme()),
    }
  }
}
impl<'src> Node<'src> for Warning<'src> {
  /// A warning is rendered as a `warning` atom tagged with the warning kind
  fn tree(&self) -> Tree<'src> {
    match self {
      Warning::DeprecatedEquals { .. } => Tree::atom("warning").push("deprecated_equals"),
    }
  }
}

View File

@ -25,4 +25,3 @@ impl Display for OutputError {
} }
} }
} }

View File

@ -1,20 +1,23 @@
use crate::common::*; use crate::common::*;
/// A single function parameter
#[derive(PartialEq, Debug)] #[derive(PartialEq, Debug)]
pub(crate) struct Parameter<'a> { pub(crate) struct Parameter<'src> {
pub(crate) default: Option<Expression<'a>>, /// The parameter name
pub(crate) name: &'a str, pub(crate) name: Name<'src>,
pub(crate) token: Token<'a>, /// Parameter is variadic
pub(crate) variadic: bool, pub(crate) variadic: bool,
/// An optional default expression
pub(crate) default: Option<Expression<'src>>,
} }
impl<'a> Display for Parameter<'a> { impl<'src> Display for Parameter<'src> {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
let color = Color::fmt(f); let color = Color::fmt(f);
if self.variadic { if self.variadic {
write!(f, "{}", color.annotation().paint("+"))?; write!(f, "{}", color.annotation().paint("+"))?;
} }
write!(f, "{}", color.parameter().paint(self.name))?; write!(f, "{}", color.parameter().paint(self.name.lexeme()))?;
if let Some(ref default) = self.default { if let Some(ref default) = self.default {
write!(f, "={}", color.string().paint(&default.to_string()))?; write!(f, "={}", color.string().paint(&default.to_string()))?;
} }

File diff suppressed because it is too large Load Diff

View File

@ -23,7 +23,7 @@ where
} }
#[cfg(test)] #[cfg(test)]
mod test { mod tests {
use super::*; use super::*;
#[test] #[test]

View File

@ -22,14 +22,13 @@ fn error_from_signal(
} }
} }
/// A recipe, e.g. `foo: bar baz`
#[derive(PartialEq, Debug)] #[derive(PartialEq, Debug)]
pub(crate) struct Recipe<'a> { pub(crate) struct Recipe<'a> {
pub(crate) dependencies: Vec<&'a str>, pub(crate) dependencies: Vec<Name<'a>>,
pub(crate) dependency_tokens: Vec<Token<'a>>,
pub(crate) doc: Option<&'a str>, pub(crate) doc: Option<&'a str>,
pub(crate) line_number: usize, pub(crate) body: Vec<Line<'a>>,
pub(crate) lines: Vec<Vec<Fragment<'a>>>, pub(crate) name: Name<'a>,
pub(crate) name: &'a str,
pub(crate) parameters: Vec<Parameter<'a>>, pub(crate) parameters: Vec<Parameter<'a>>,
pub(crate) private: bool, pub(crate) private: bool,
pub(crate) quiet: bool, pub(crate) quiet: bool,
@ -57,12 +56,19 @@ impl<'a> Recipe<'a> {
} }
} }
pub(crate) fn name(&self) -> &'a str {
self.name.lexeme()
}
pub(crate) fn line_number(&self) -> usize {
self.name.line
}
pub(crate) fn run( pub(crate) fn run(
&self, &self,
context: &RecipeContext<'a>, context: &RecipeContext<'a>,
arguments: &[&'a str], arguments: &[&'a str],
dotenv: &BTreeMap<String, String>, dotenv: &BTreeMap<String, String>,
exports: &BTreeSet<&'a str>,
) -> RunResult<'a, ()> { ) -> RunResult<'a, ()> {
let config = &context.config; let config = &context.config;
@ -88,7 +94,6 @@ impl<'a> Recipe<'a> {
scope: &context.scope, scope: &context.scope,
shell: config.shell, shell: config.shell,
dotenv, dotenv,
exports,
}; };
let mut rest = arguments; let mut rest = arguments;
@ -111,13 +116,13 @@ impl<'a> Recipe<'a> {
rest = &rest[1..]; rest = &rest[1..];
value value
}; };
argument_map.insert(parameter.name, value); argument_map.insert(parameter.name.lexeme(), value);
} }
if self.shebang { if self.shebang {
let mut evaluated_lines = vec![]; let mut evaluated_lines = vec![];
for line in &self.lines { for line in &self.body {
evaluated_lines.push(evaluator.evaluate_line(line, &argument_map)?); evaluated_lines.push(evaluator.evaluate_line(&line.fragments, &argument_map)?);
} }
if config.dry_run || self.quiet { if config.dry_run || self.quiet {
@ -134,14 +139,14 @@ impl<'a> Recipe<'a> {
.prefix("just") .prefix("just")
.tempdir() .tempdir()
.map_err(|error| RuntimeError::TmpdirIoError { .map_err(|error| RuntimeError::TmpdirIoError {
recipe: self.name, recipe: self.name(),
io_error: error, io_error: error,
})?; })?;
let mut path = tmp.path().to_path_buf(); let mut path = tmp.path().to_path_buf();
path.push(self.name); path.push(self.name());
{ {
let mut f = fs::File::create(&path).map_err(|error| RuntimeError::TmpdirIoError { let mut f = fs::File::create(&path).map_err(|error| RuntimeError::TmpdirIoError {
recipe: self.name, recipe: self.name(),
io_error: error, io_error: error,
})?; })?;
let mut text = String::new(); let mut text = String::new();
@ -151,7 +156,7 @@ impl<'a> Recipe<'a> {
// add blank lines so that lines in the generated script // add blank lines so that lines in the generated script
// have the same line number as the corresponding lines // have the same line number as the corresponding lines
// in the justfile // in the justfile
for _ in 1..(self.line_number + 2) { for _ in 1..(self.line_number() + 2) {
text += "\n" text += "\n"
} }
for line in &evaluated_lines[1..] { for line in &evaluated_lines[1..] {
@ -165,14 +170,14 @@ impl<'a> Recipe<'a> {
f.write_all(text.as_bytes()) f.write_all(text.as_bytes())
.map_err(|error| RuntimeError::TmpdirIoError { .map_err(|error| RuntimeError::TmpdirIoError {
recipe: self.name, recipe: self.name(),
io_error: error, io_error: error,
})?; })?;
} }
// make the script executable // make the script executable
Platform::set_execute_permission(&path).map_err(|error| RuntimeError::TmpdirIoError { Platform::set_execute_permission(&path).map_err(|error| RuntimeError::TmpdirIoError {
recipe: self.name, recipe: self.name(),
io_error: error, io_error: error,
})?; })?;
@ -193,12 +198,12 @@ impl<'a> Recipe<'a> {
let mut command = let mut command =
Platform::make_shebang_command(&path, interpreter, argument).map_err(|output_error| { Platform::make_shebang_command(&path, interpreter, argument).map_err(|output_error| {
RuntimeError::Cygpath { RuntimeError::Cygpath {
recipe: self.name, recipe: self.name(),
output_error, output_error,
} }
})?; })?;
command.export_environment_variables(&context.scope, dotenv, exports)?; command.export_environment_variables(&context.scope, dotenv)?;
// run it! // run it!
match InterruptHandler::guard(|| command.status()) { match InterruptHandler::guard(|| command.status()) {
@ -206,18 +211,18 @@ impl<'a> Recipe<'a> {
if let Some(code) = exit_status.code() { if let Some(code) = exit_status.code() {
if code != 0 { if code != 0 {
return Err(RuntimeError::Code { return Err(RuntimeError::Code {
recipe: self.name, recipe: self.name(),
line_number: None, line_number: None,
code, code,
}); });
} }
} else { } else {
return Err(error_from_signal(self.name, None, exit_status)); return Err(error_from_signal(self.name(), None, exit_status));
} }
} }
Err(io_error) => { Err(io_error) => {
return Err(RuntimeError::Shebang { return Err(RuntimeError::Shebang {
recipe: self.name, recipe: self.name(),
command: interpreter.to_string(), command: interpreter.to_string(),
argument: argument.map(String::from), argument: argument.map(String::from),
io_error, io_error,
@ -225,8 +230,8 @@ impl<'a> Recipe<'a> {
} }
}; };
} else { } else {
let mut lines = self.lines.iter().peekable(); let mut lines = self.body.iter().peekable();
let mut line_number = self.line_number + 1; let mut line_number = self.line_number() + 1;
loop { loop {
if lines.peek().is_none() { if lines.peek().is_none() {
break; break;
@ -238,8 +243,8 @@ impl<'a> Recipe<'a> {
} }
let line = lines.next().unwrap(); let line = lines.next().unwrap();
line_number += 1; line_number += 1;
evaluated += &evaluator.evaluate_line(line, &argument_map)?; evaluated += &evaluator.evaluate_line(&line.fragments, &argument_map)?;
if line.last().map(Fragment::continuation).unwrap_or(false) { if line.is_continuation() {
evaluated.pop(); evaluated.pop();
} else { } else {
break; break;
@ -280,25 +285,29 @@ impl<'a> Recipe<'a> {
cmd.stdout(Stdio::null()); cmd.stdout(Stdio::null());
} }
cmd.export_environment_variables(&context.scope, dotenv, exports)?; cmd.export_environment_variables(&context.scope, dotenv)?;
match InterruptHandler::guard(|| cmd.status()) { match InterruptHandler::guard(|| cmd.status()) {
Ok(exit_status) => { Ok(exit_status) => {
if let Some(code) = exit_status.code() { if let Some(code) = exit_status.code() {
if code != 0 { if code != 0 {
return Err(RuntimeError::Code { return Err(RuntimeError::Code {
recipe: self.name, recipe: self.name(),
line_number: Some(line_number), line_number: Some(line_number),
code, code,
}); });
} }
} else { } else {
return Err(error_from_signal(self.name, Some(line_number), exit_status)); return Err(error_from_signal(
self.name(),
Some(line_number),
exit_status,
));
} }
} }
Err(io_error) => { Err(io_error) => {
return Err(RuntimeError::IoError { return Err(RuntimeError::IoError {
recipe: self.name, recipe: self.name(),
io_error, io_error,
}); });
} }
@ -309,6 +318,12 @@ impl<'a> Recipe<'a> {
} }
} }
impl<'src> Keyed<'src> for Recipe<'src> {
fn key(&self) -> &'src str {
self.name.lexeme()
}
}
impl<'a> Display for Recipe<'a> { impl<'a> Display for Recipe<'a> {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
if let Some(doc) = self.doc { if let Some(doc) = self.doc {
@ -329,20 +344,20 @@ impl<'a> Display for Recipe<'a> {
write!(f, " {}", dependency)?; write!(f, " {}", dependency)?;
} }
for (i, pieces) in self.lines.iter().enumerate() { for (i, line) in self.body.iter().enumerate() {
if i == 0 { if i == 0 {
writeln!(f)?; writeln!(f)?;
} }
for (j, piece) in pieces.iter().enumerate() { for (j, fragment) in line.fragments.iter().enumerate() {
if j == 0 { if j == 0 {
write!(f, " ")?; write!(f, " ")?;
} }
match *piece { match fragment {
Fragment::Text { ref text } => write!(f, "{}", text.lexeme())?, Fragment::Text { token } => write!(f, "{}", token.lexeme())?,
Fragment::Expression { ref expression, .. } => write!(f, "{{{{{}}}}}", expression)?, Fragment::Interpolation { expression, .. } => write!(f, "{{{{{}}}}}", expression)?,
} }
} }
if i + 1 < self.lines.len() { if i + 1 < self.body.len() {
writeln!(f)?; writeln!(f)?;
} }
} }

View File

@ -2,5 +2,5 @@ use crate::common::*;
pub(crate) struct RecipeContext<'a> { pub(crate) struct RecipeContext<'a> {
pub(crate) config: &'a Config<'a>, pub(crate) config: &'a Config<'a>,
pub(crate) scope: BTreeMap<&'a str, String>, pub(crate) scope: BTreeMap<&'a str, (bool, String)>,
} }

View File

@ -17,22 +17,19 @@ pub(crate) struct RecipeResolver<'a: 'b, 'b> {
seen: BTreeSet<&'a str>, seen: BTreeSet<&'a str>,
resolved: BTreeSet<&'a str>, resolved: BTreeSet<&'a str>,
recipes: &'b BTreeMap<&'a str, Recipe<'a>>, recipes: &'b BTreeMap<&'a str, Recipe<'a>>,
assignments: &'b BTreeMap<&'a str, Expression<'a>>, assignments: &'b BTreeMap<&'a str, Assignment<'a>>,
text: &'a str,
} }
impl<'a, 'b> RecipeResolver<'a, 'b> { impl<'a, 'b> RecipeResolver<'a, 'b> {
pub(crate) fn resolve_recipes( pub(crate) fn resolve_recipes(
recipes: &BTreeMap<&'a str, Recipe<'a>>, recipes: &BTreeMap<&'a str, Recipe<'a>>,
assignments: &BTreeMap<&'a str, Expression<'a>>, assignments: &BTreeMap<&'a str, Assignment<'a>>,
text: &'a str,
) -> CompilationResult<'a, ()> { ) -> CompilationResult<'a, ()> {
let mut resolver = RecipeResolver { let mut resolver = RecipeResolver {
seen: empty(), seen: empty(),
stack: empty(), stack: empty(),
resolved: empty(), resolved: empty(),
assignments, assignments,
text,
recipes, recipes,
}; };
@ -48,19 +45,19 @@ impl<'a, 'b> RecipeResolver<'a, 'b> {
resolver.resolve_function(function, argc)?; resolver.resolve_function(function, argc)?;
} }
for variable in expression.variables() { for variable in expression.variables() {
resolver.resolve_variable(variable, &[])?; resolver.resolve_variable(&variable, &[])?;
} }
} }
} }
for line in &recipe.lines { for line in &recipe.body {
for fragment in line { for fragment in &line.fragments {
if let Fragment::Expression { ref expression, .. } = *fragment { if let Fragment::Interpolation { expression, .. } = fragment {
for (function, argc) in expression.functions() { for (function, argc) in expression.functions() {
resolver.resolve_function(function, argc)?; resolver.resolve_function(function, argc)?;
} }
for variable in expression.variables() { for variable in expression.variables() {
resolver.resolve_variable(variable, &recipe.parameters)?; resolver.resolve_variable(&variable, &recipe.parameters)?;
} }
} }
} }
@ -70,27 +67,27 @@ impl<'a, 'b> RecipeResolver<'a, 'b> {
Ok(()) Ok(())
} }
fn resolve_function(&self, function: &Token, argc: usize) -> CompilationResult<'a, ()> { fn resolve_function(&self, function: Token<'a>, argc: usize) -> CompilationResult<'a, ()> {
Function::resolve(function, argc).map_err(|error| CompilationError { Function::resolve(&function, argc).map_err(|error| CompilationError {
offset: error.offset, offset: error.offset,
line: error.line, line: error.line,
column: error.column, column: error.column,
width: error.width, width: error.width,
kind: UnknownFunction { kind: UnknownFunction {
function: &self.text[error.offset..error.offset + error.width], function: &function.src[error.offset..error.offset + error.width],
}, },
text: self.text, src: function.src,
}) })
} }
fn resolve_variable( fn resolve_variable(
&self, &self,
variable: &Token, variable: &Token<'a>,
parameters: &[Parameter], parameters: &[Parameter],
) -> CompilationResult<'a, ()> { ) -> CompilationResult<'a, ()> {
let name = variable.lexeme(); let name = variable.lexeme();
let undefined = let undefined =
!self.assignments.contains_key(name) && !parameters.iter().any(|p| p.name == name); !self.assignments.contains_key(name) && !parameters.iter().any(|p| p.name.lexeme() == name);
if undefined { if undefined {
let error = variable.error(UndefinedVariable { variable: name }); let error = variable.error(UndefinedVariable { variable: name });
return Err(CompilationError { return Err(CompilationError {
@ -99,9 +96,9 @@ impl<'a, 'b> RecipeResolver<'a, 'b> {
column: error.column, column: error.column,
width: error.width, width: error.width,
kind: UndefinedVariable { kind: UndefinedVariable {
variable: &self.text[error.offset..error.offset + error.width], variable: &variable.src[error.offset..error.offset + error.width],
}, },
text: self.text, src: variable.src,
}); });
} }
@ -109,25 +106,29 @@ impl<'a, 'b> RecipeResolver<'a, 'b> {
} }
fn resolve_recipe(&mut self, recipe: &Recipe<'a>) -> CompilationResult<'a, ()> { fn resolve_recipe(&mut self, recipe: &Recipe<'a>) -> CompilationResult<'a, ()> {
if self.resolved.contains(recipe.name) { if self.resolved.contains(recipe.name()) {
return Ok(()); return Ok(());
} }
self.stack.push(recipe.name); self.stack.push(recipe.name());
self.seen.insert(recipe.name); self.seen.insert(recipe.name());
for dependency_token in &recipe.dependency_tokens { for dependency_token in recipe
.dependencies
.iter()
.map(|dependency| dependency.token())
{
match self.recipes.get(dependency_token.lexeme()) { match self.recipes.get(dependency_token.lexeme()) {
Some(dependency) => { Some(dependency) => {
if !self.resolved.contains(dependency.name) { if !self.resolved.contains(dependency.name()) {
if self.seen.contains(dependency.name) { if self.seen.contains(dependency.name()) {
let first = self.stack[0]; let first = self.stack[0];
self.stack.push(first); self.stack.push(first);
return Err( return Err(
dependency_token.error(CircularRecipeDependency { dependency_token.error(CircularRecipeDependency {
recipe: recipe.name, recipe: recipe.name(),
circle: self circle: self
.stack .stack
.iter() .iter()
.skip_while(|name| **name != dependency.name) .skip_while(|name| **name != dependency.name())
.cloned() .cloned()
.collect(), .collect(),
}), }),
@ -138,23 +139,23 @@ impl<'a, 'b> RecipeResolver<'a, 'b> {
} }
None => { None => {
return Err(dependency_token.error(UnknownDependency { return Err(dependency_token.error(UnknownDependency {
recipe: recipe.name, recipe: recipe.name(),
unknown: dependency_token.lexeme(), unknown: dependency_token.lexeme(),
})); }));
} }
} }
} }
self.resolved.insert(recipe.name); self.resolved.insert(recipe.name());
self.stack.pop(); self.stack.pop();
Ok(()) Ok(())
} }
} }
#[cfg(test)] #[cfg(test)]
mod test { mod tests {
use super::*; use super::*;
error_test! { analysis_error! {
name: circular_recipe_dependency, name: circular_recipe_dependency,
input: "a: b\nb: a", input: "a: b\nb: a",
offset: 8, offset: 8,
@ -164,7 +165,7 @@ mod test {
kind: CircularRecipeDependency{recipe: "b", circle: vec!["a", "b", "a"]}, kind: CircularRecipeDependency{recipe: "b", circle: vec!["a", "b", "a"]},
} }
error_test! { analysis_error! {
name: self_recipe_dependency, name: self_recipe_dependency,
input: "a: a", input: "a: a",
offset: 3, offset: 3,
@ -174,7 +175,7 @@ mod test {
kind: CircularRecipeDependency{recipe: "a", circle: vec!["a", "a"]}, kind: CircularRecipeDependency{recipe: "a", circle: vec!["a", "a"]},
} }
error_test! { analysis_error! {
name: unknown_dependency, name: unknown_dependency,
input: "a: b", input: "a: b",
offset: 3, offset: 3,
@ -184,7 +185,7 @@ mod test {
kind: UnknownDependency{recipe: "a", unknown: "b"}, kind: UnknownDependency{recipe: "a", unknown: "b"},
} }
error_test! { analysis_error! {
name: unknown_interpolation_variable, name: unknown_interpolation_variable,
input: "x:\n {{ hello}}", input: "x:\n {{ hello}}",
offset: 9, offset: 9,
@ -194,7 +195,7 @@ mod test {
kind: UndefinedVariable{variable: "hello"}, kind: UndefinedVariable{variable: "hello"},
} }
error_test! { analysis_error! {
name: unknown_second_interpolation_variable, name: unknown_second_interpolation_variable,
input: "wtf=\"x\"\nx:\n echo\n foo {{wtf}} {{ lol }}", input: "wtf=\"x\"\nx:\n echo\n foo {{wtf}} {{ lol }}",
offset: 33, offset: 33,
@ -204,7 +205,7 @@ mod test {
kind: UndefinedVariable{variable: "lol"}, kind: UndefinedVariable{variable: "lol"},
} }
error_test! { analysis_error! {
name: unknown_function_in_interpolation, name: unknown_function_in_interpolation,
input: "a:\n echo {{bar()}}", input: "a:\n echo {{bar()}}",
offset: 11, offset: 11,
@ -214,7 +215,7 @@ mod test {
kind: UnknownFunction{function: "bar"}, kind: UnknownFunction{function: "bar"},
} }
error_test! { analysis_error! {
name: unknown_function_in_default, name: unknown_function_in_default,
input: "a f=baz():", input: "a f=baz():",
offset: 4, offset: 4,
@ -224,7 +225,7 @@ mod test {
kind: UnknownFunction{function: "baz"}, kind: UnknownFunction{function: "baz"},
} }
error_test! { analysis_error! {
name: unknown_variable_in_default, name: unknown_variable_in_default,
input: "a f=foo:", input: "a f=foo:",
offset: 4, offset: 4,

View File

@ -131,7 +131,7 @@ pub fn run() -> Result<(), i32> {
} }
} }
let justfile = match Parser::parse(&text) { let justfile = match Compiler::compile(&text) {
Err(error) => { Err(error) => {
if config.color.stderr().active() { if config.color.stderr().active() {
eprintln!("{:#}", error); eprintln!("{:#}", error);
@ -177,15 +177,15 @@ pub fn run() -> Result<(), i32> {
// Construct a target to alias map. // Construct a target to alias map.
let mut recipe_aliases: BTreeMap<&str, Vec<&str>> = BTreeMap::new(); let mut recipe_aliases: BTreeMap<&str, Vec<&str>> = BTreeMap::new();
for alias in justfile.aliases.values() { for alias in justfile.aliases.values() {
if alias.private { if alias.is_private() {
continue; continue;
} }
if !recipe_aliases.contains_key(alias.target) { if !recipe_aliases.contains_key(alias.target.lexeme()) {
recipe_aliases.insert(alias.target, vec![alias.name]); recipe_aliases.insert(alias.target.lexeme(), vec![alias.name.lexeme()]);
} else { } else {
let aliases = recipe_aliases.get_mut(alias.target).unwrap(); let aliases = recipe_aliases.get_mut(alias.target.lexeme()).unwrap();
aliases.push(alias.name); aliases.push(alias.name.lexeme());
} }
} }
@ -262,7 +262,7 @@ pub fn run() -> Result<(), i32> {
if let Subcommand::Show { name } = config.subcommand { if let Subcommand::Show { name } = config.subcommand {
if let Some(alias) = justfile.get_alias(name) { if let Some(alias) = justfile.get_alias(name) {
let recipe = justfile.get_recipe(alias.target).unwrap(); let recipe = justfile.get_recipe(alias.target.lexeme()).unwrap();
println!("{}", alias); println!("{}", alias);
println!("{}", recipe); println!("{}", recipe);
return Ok(()); return Ok(());
@ -291,7 +291,7 @@ pub fn run() -> Result<(), i32> {
Count("argument", min_arguments), Count("argument", min_arguments),
); );
} }
vec![recipe.name] vec![recipe.name()]
} else { } else {
die!("Justfile contains no recipes."); die!("Justfile contains no recipes.");
}; };

View File

@ -26,7 +26,7 @@ pub(crate) enum RuntimeError<'a> {
dotenv_error: dotenv::Error, dotenv_error: dotenv::Error,
}, },
FunctionCall { FunctionCall {
token: Token<'a>, function: Name<'a>,
message: String, message: String,
}, },
Internal { Internal {
@ -91,7 +91,7 @@ impl<'a> Display for RuntimeError<'a> {
let message = color.message(); let message = color.message();
write!(f, "{} {}", error.paint("error:"), message.prefix())?; write!(f, "{} {}", error.paint("error:"), message.prefix())?;
let mut error_token = None; let mut error_token: Option<Token> = None;
match *self { match *self {
UnknownRecipes { UnknownRecipes {
@ -235,16 +235,16 @@ impl<'a> Display for RuntimeError<'a> {
writeln!(f, "Failed to load .env: {}", dotenv_error)?; writeln!(f, "Failed to load .env: {}", dotenv_error)?;
} }
FunctionCall { FunctionCall {
ref token, ref function,
ref message, ref message,
} => { } => {
writeln!( writeln!(
f, f,
"Call to function `{}` failed: {}", "Call to function `{}` failed: {}",
token.lexeme(), function.lexeme(),
message message
)?; )?;
error_token = Some(token); error_token = Some(function.token());
} }
Shebang { Shebang {
recipe, recipe,
@ -332,15 +332,15 @@ impl<'a> Display for RuntimeError<'a> {
} => match *output_error { } => match *output_error {
OutputError::Code(code) => { OutputError::Code(code) => {
writeln!(f, "Backtick failed with exit code {}", code)?; writeln!(f, "Backtick failed with exit code {}", code)?;
error_token = Some(token); error_token = Some(*token);
} }
OutputError::Signal(signal) => { OutputError::Signal(signal) => {
writeln!(f, "Backtick was terminated by signal {}", signal)?; writeln!(f, "Backtick was terminated by signal {}", signal)?;
error_token = Some(token); error_token = Some(*token);
} }
OutputError::Unknown => { OutputError::Unknown => {
writeln!(f, "Backtick failed for an unknown reason")?; writeln!(f, "Backtick failed for an unknown reason")?;
error_token = Some(token); error_token = Some(*token);
} }
OutputError::Io(ref io_error) => { OutputError::Io(ref io_error) => {
match io_error.kind() { match io_error.kind() {
@ -361,7 +361,7 @@ impl<'a> Display for RuntimeError<'a> {
io_error io_error
), ),
}?; }?;
error_token = Some(token); error_token = Some(*token);
} }
OutputError::Utf8(ref utf8_error) => { OutputError::Utf8(ref utf8_error) => {
writeln!( writeln!(
@ -369,7 +369,7 @@ impl<'a> Display for RuntimeError<'a> {
"Backtick succeeded but stdout was not utf8: {}", "Backtick succeeded but stdout was not utf8: {}",
utf8_error utf8_error
)?; )?;
error_token = Some(token); error_token = Some(*token);
} }
}, },
Internal { ref message } => { Internal { ref message } => {
@ -388,7 +388,7 @@ impl<'a> Display for RuntimeError<'a> {
write_message_context( write_message_context(
f, f,
Color::fmt(f).error(), Color::fmt(f).error(),
token.text, token.src,
token.offset, token.offset,
token.line, token.line,
token.column, token.column,

View File

@ -31,7 +31,7 @@ impl<'a> Shebang<'a> {
} }
#[cfg(test)] #[cfg(test)]
mod test { mod tests {
use super::Shebang; use super::Shebang;
#[test] #[test]

View File

@ -1,59 +1,12 @@
use crate::common::*; use crate::common::*;
#[derive(PartialEq, Debug)] #[derive(PartialEq, Debug)]
pub(crate) struct StringLiteral<'a> { pub(crate) struct StringLiteral<'src> {
pub(crate) raw: &'a str, pub(crate) raw: &'src str,
pub(crate) cooked: Cow<'a, str>, pub(crate) cooked: Cow<'src, str>,
} }
impl<'a> StringLiteral<'a> { impl Display for StringLiteral<'_> {
pub(crate) fn new(token: &Token<'a>) -> CompilationResult<'a, StringLiteral<'a>> {
let raw = &token.lexeme()[1..token.lexeme().len() - 1];
if let TokenKind::StringRaw = token.kind {
Ok(StringLiteral {
cooked: Cow::Borrowed(raw),
raw,
})
} else if let TokenKind::StringCooked = token.kind {
let mut cooked = String::new();
let mut escape = false;
for c in raw.chars() {
if escape {
match c {
'n' => cooked.push('\n'),
'r' => cooked.push('\r'),
't' => cooked.push('\t'),
'\\' => cooked.push('\\'),
'"' => cooked.push('"'),
other => {
return Err(
token.error(CompilationErrorKind::InvalidEscapeSequence { character: other }),
);
}
}
escape = false;
continue;
}
if c == '\\' {
escape = true;
continue;
}
cooked.push(c);
}
Ok(StringLiteral {
raw,
cooked: Cow::Owned(cooked),
})
} else {
Err(token.error(CompilationErrorKind::Internal {
message: "cook_string() called on non-string token".to_string(),
}))
}
}
}
impl<'a> Display for StringLiteral<'a> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result { fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match self.cooked { match self.cooked {
Cow::Borrowed(raw) => write!(f, "'{}'", raw), Cow::Borrowed(raw) => write!(f, "'{}'", raw),

View File

@ -18,12 +18,19 @@ use std::{
path::Path, path::Path,
}; };
use crate::{expression, fragment, justfile::Justfile, parameter, parser::Parser, recipe}; use crate::compiler::Compiler;
mod full {
pub(crate) use crate::{
assignment::Assignment, expression::Expression, fragment::Fragment, justfile::Justfile,
line::Line, parameter::Parameter, recipe::Recipe,
};
}
pub fn summary(path: &Path) -> Result<Result<Summary, String>, io::Error> { pub fn summary(path: &Path) -> Result<Result<Summary, String>, io::Error> {
let text = fs::read_to_string(path)?; let text = fs::read_to_string(path)?;
match Parser::parse(&text) { match Compiler::compile(&text) {
Ok(justfile) => Ok(Ok(Summary::new(justfile))), Ok(justfile) => Ok(Ok(Summary::new(justfile))),
Err(compilation_error) => Ok(Err(compilation_error.to_string())), Err(compilation_error) => Ok(Err(compilation_error.to_string())),
} }
@ -36,14 +43,12 @@ pub struct Summary {
} }
impl Summary { impl Summary {
fn new(justfile: Justfile) -> Summary { fn new(justfile: full::Justfile) -> Summary {
let exports = justfile.exports;
let mut aliases = BTreeMap::new(); let mut aliases = BTreeMap::new();
for alias in justfile.aliases.values() { for alias in justfile.aliases.values() {
aliases aliases
.entry(alias.target) .entry(alias.target.lexeme())
.or_insert_with(Vec::new) .or_insert_with(Vec::new)
.push(alias.name.to_string()); .push(alias.name.to_string());
} }
@ -62,12 +67,7 @@ impl Summary {
assignments: justfile assignments: justfile
.assignments .assignments
.into_iter() .into_iter()
.map(|(name, expression)| { .map(|(name, assignment)| (name.to_string(), Assignment::new(assignment)))
(
name.to_string(),
Assignment::new(name, expression, &exports),
)
})
.collect(), .collect(),
} }
} }
@ -85,13 +85,17 @@ pub struct Recipe {
} }
impl Recipe { impl Recipe {
fn new(recipe: recipe::Recipe, aliases: Vec<String>) -> Recipe { fn new(recipe: full::Recipe, aliases: Vec<String>) -> Recipe {
Recipe { Recipe {
private: recipe.private, private: recipe.private,
shebang: recipe.shebang, shebang: recipe.shebang,
quiet: recipe.quiet, quiet: recipe.quiet,
dependencies: recipe.dependencies.into_iter().map(str::to_owned).collect(), dependencies: recipe
lines: recipe.lines.into_iter().map(Line::new).collect(), .dependencies
.into_iter()
.map(|name| name.lexeme().to_string())
.collect(),
lines: recipe.body.into_iter().map(Line::new).collect(),
parameters: recipe.parameters.into_iter().map(Parameter::new).collect(), parameters: recipe.parameters.into_iter().map(Parameter::new).collect(),
aliases, aliases,
} }
@ -106,10 +110,10 @@ pub struct Parameter {
} }
impl Parameter { impl Parameter {
fn new(parameter: parameter::Parameter) -> Parameter { fn new(parameter: full::Parameter) -> Parameter {
Parameter { Parameter {
variadic: parameter.variadic, variadic: parameter.variadic,
name: parameter.name.to_owned(), name: parameter.name.lexeme().to_owned(),
default: parameter.default.map(Expression::new), default: parameter.default.map(Expression::new),
} }
} }
@ -121,9 +125,9 @@ pub struct Line {
} }
impl Line { impl Line {
fn new(fragments: Vec<fragment::Fragment>) -> Line { fn new(line: full::Line) -> Line {
Line { Line {
fragments: fragments.into_iter().map(Fragment::new).collect(), fragments: line.fragments.into_iter().map(Fragment::new).collect(),
} }
} }
} }
@ -135,12 +139,12 @@ pub enum Fragment {
} }
impl Fragment { impl Fragment {
fn new(fragment: fragment::Fragment) -> Fragment { fn new(fragment: full::Fragment) -> Fragment {
match fragment { match fragment {
fragment::Fragment::Text { text } => Fragment::Text { full::Fragment::Text { token } => Fragment::Text {
text: text.lexeme().to_owned(), text: token.lexeme().to_owned(),
}, },
fragment::Fragment::Expression { expression } => Fragment::Expression { full::Fragment::Interpolation { expression } => Fragment::Expression {
expression: Expression::new(expression), expression: Expression::new(expression),
}, },
} }
@ -154,10 +158,10 @@ pub struct Assignment {
} }
impl Assignment { impl Assignment {
fn new(name: &str, expression: expression::Expression, exports: &BTreeSet<&str>) -> Assignment { fn new(assignment: full::Assignment) -> Assignment {
Assignment { Assignment {
exported: exports.contains(name), exported: assignment.export,
expression: Expression::new(expression), expression: Expression::new(assignment.expression),
} }
} }
} }
@ -184,29 +188,30 @@ pub enum Expression {
} }
impl Expression { impl Expression {
fn new(expression: expression::Expression) -> Expression { fn new(expression: full::Expression) -> Expression {
use expression::Expression::*; use full::Expression::*;
match expression { match expression {
Backtick { raw, .. } => Expression::Backtick { Backtick { contents, .. } => Expression::Backtick {
command: raw.to_owned(), command: contents.to_owned(),
}, },
Call { Call {
name, arguments, .. function,
arguments,
} => Expression::Call { } => Expression::Call {
name: name.to_owned(), name: function.lexeme().to_owned(),
arguments: arguments.into_iter().map(Expression::new).collect(), arguments: arguments.into_iter().map(Expression::new).collect(),
}, },
Concatination { lhs, rhs } => Expression::Concatination { Concatination { lhs, rhs } => Expression::Concatination {
lhs: Box::new(Expression::new(*lhs)), lhs: Box::new(Expression::new(*lhs)),
rhs: Box::new(Expression::new(*rhs)), rhs: Box::new(Expression::new(*rhs)),
}, },
String { cooked_string } => Expression::String { StringLiteral { string_literal } => Expression::String {
text: cooked_string.cooked.to_string(), text: string_literal.cooked.to_string(),
}, },
Variable { name, .. } => Expression::Variable { Variable { name, .. } => Expression::Variable {
name: name.to_owned(), name: name.lexeme().to_owned(),
}, },
Group { expression } => Expression::new(*expression), Group { contents } => Expression::new(*contents),
} }
} }
} }

46
src/table.rs Normal file
View File

@ -0,0 +1,46 @@
use crate::common::*;
#[derive(Debug, PartialEq)]
pub(crate) struct Table<'key, V: Keyed<'key>> {
map: BTreeMap<&'key str, V>,
}
impl<'key, V: Keyed<'key>> Table<'key, V> {
pub(crate) fn insert(&mut self, value: V) {
self.map.insert(value.key(), value);
}
}
impl<'key, V: Keyed<'key>> FromIterator<V> for Table<'key, V> {
fn from_iter<I: IntoIterator<Item = V>>(iter: I) -> Self {
Table {
map: iter.into_iter().map(|value| (value.key(), value)).collect(),
}
}
}
impl<'key, V: Keyed<'key>> Deref for Table<'key, V> {
type Target = BTreeMap<&'key str, V>;
fn deref(&self) -> &Self::Target {
&self.map
}
}
impl<'key, V: Keyed<'key>> IntoIterator for Table<'key, V> {
type Item = (&'key str, V);
type IntoIter = std::collections::btree_map::IntoIter<&'key str, V>;
fn into_iter(self) -> std::collections::btree_map::IntoIter<&'key str, V> {
self.map.into_iter()
}
}
impl<'table, V: Keyed<'table> + 'table> IntoIterator for &'table Table<'table, V> {
type Item = (&'table &'table str, &'table V);
type IntoIter = std::collections::btree_map::Iter<'table, &'table str, V>;
fn into_iter(self) -> std::collections::btree_map::Iter<'table, &'table str, V> {
self.map.iter()
}
}

View File

@ -1,15 +1,15 @@
use crate::common::*; use crate::common::*;
pub(crate) fn parse(text: &str) -> Justfile { pub(crate) fn compile(text: &str) -> Justfile {
match Parser::parse(text) { match Compiler::compile(text) {
Ok(justfile) => justfile, Ok(justfile) => justfile,
Err(error) => panic!("Expected successful parse but got error:\n {}", error), Err(error) => panic!("Expected successful compilation but got error:\n {}", error),
} }
} }
pub(crate) use test_utilities::{tempdir, unindent}; pub(crate) use test_utilities::{tempdir, unindent};
macro_rules! error_test { macro_rules! analysis_error {
( (
name: $name:ident, name: $name:ident,
input: $input:expr, input: $input:expr,
@ -21,15 +21,21 @@ macro_rules! error_test {
) => { ) => {
#[test] #[test]
fn $name() { fn $name() {
let text: &str = $input; $crate::testing::error($input, $offset, $line, $column, $width, $kind);
let offset: usize = $offset; }
let column: usize = $column; };
let width: usize = $width; }
let line: usize = $line;
let kind: CompilationErrorKind = $kind;
pub(crate) fn error(
src: &str,
offset: usize,
line: usize,
column: usize,
width: usize,
kind: CompilationErrorKind,
) {
let expected = CompilationError { let expected = CompilationError {
text, src,
offset, offset,
line, line,
column, column,
@ -37,13 +43,39 @@ macro_rules! error_test {
kind, kind,
}; };
match Parser::parse(text) { let tokens = Lexer::lex(src).expect("Lexing failed in parse test...");
Ok(_) => panic!("Compilation succeeded but expected: {}\n{}", expected, text),
let module = Parser::parse(&tokens).expect("Parsing failed in analysis test...");
match Analyzer::analyze(module) {
Ok(_) => panic!("Analysis succeeded but expected: {}\n{}", expected, src),
Err(actual) => { Err(actual) => {
use pretty_assertions::assert_eq;
assert_eq!(actual, expected); assert_eq!(actual, expected);
} }
} }
} }
};
#[test]
fn readme_test() {
let mut justfiles = vec![];
let mut current = None;
for line in fs::read_to_string("README.adoc").unwrap().lines() {
if let Some(mut justfile) = current {
if line == "```" {
justfiles.push(justfile);
current = None;
} else {
justfile += line;
justfile += "\n";
current = Some(justfile);
}
} else if line == "```make" {
current = Some(String::new());
}
}
for justfile in justfiles {
compile(&justfile);
}
} }

View File

@ -1,18 +1,18 @@
use crate::common::*; use crate::common::*;
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone, Copy)]
pub(crate) struct Token<'a> { pub(crate) struct Token<'a> {
pub(crate) offset: usize, pub(crate) offset: usize,
pub(crate) length: usize, pub(crate) length: usize,
pub(crate) line: usize, pub(crate) line: usize,
pub(crate) column: usize, pub(crate) column: usize,
pub(crate) text: &'a str, pub(crate) src: &'a str,
pub(crate) kind: TokenKind, pub(crate) kind: TokenKind,
} }
impl<'a> Token<'a> { impl<'a> Token<'a> {
pub(crate) fn lexeme(&self) -> &'a str { pub(crate) fn lexeme(&self) -> &'a str {
&self.text[self.offset..self.offset + self.length] &self.src[self.offset..self.offset + self.length]
} }
pub(crate) fn error(&self, kind: CompilationErrorKind<'a>) -> CompilationError<'a> { pub(crate) fn error(&self, kind: CompilationErrorKind<'a>) -> CompilationError<'a> {
@ -20,7 +20,7 @@ impl<'a> Token<'a> {
column: self.column, column: self.column,
offset: self.offset, offset: self.offset,
line: self.line, line: self.line,
text: self.text, src: self.src,
width: self.length, width: self.length,
kind, kind,
} }

View File

@ -1,6 +1,6 @@
use crate::common::*; use crate::common::*;
#[derive(Debug, PartialEq, Clone, Copy)] #[derive(Debug, PartialEq, Clone, Copy, Ord, PartialOrd, Eq)]
pub(crate) enum TokenKind { pub(crate) enum TokenKind {
At, At,
Backtick, Backtick,
@ -12,16 +12,15 @@ pub(crate) enum TokenKind {
Eof, Eof,
Eol, Eol,
Equals, Equals,
Identifier,
Indent, Indent,
InterpolationEnd, InterpolationEnd,
InterpolationStart, InterpolationStart,
Line,
Name,
ParenL, ParenL,
ParenR, ParenR,
Plus, Plus,
StringRaw,
StringCooked, StringCooked,
StringRaw,
Text, Text,
Whitespace, Whitespace,
} }
@ -43,16 +42,15 @@ impl Display for TokenKind {
Eof => "end of file", Eof => "end of file",
Eol => "end of line", Eol => "end of line",
Equals => "'='", Equals => "'='",
Identifier => "identifier",
Indent => "indent", Indent => "indent",
InterpolationEnd => "'}}'", InterpolationEnd => "'}}'",
InterpolationStart => "'{{'", InterpolationStart => "'{{'",
Line => "command",
Name => "name",
ParenL => "'('", ParenL => "'('",
ParenR => "')'", ParenR => "')'",
Plus => "'+'", Plus => "'+'",
StringRaw => "raw string",
StringCooked => "cooked string", StringCooked => "cooked string",
StringRaw => "raw string",
Text => "command text", Text => "command text",
Whitespace => "whitespace", Whitespace => "whitespace",
} }

130
src/tree.rs Normal file
View File

@ -0,0 +1,130 @@
use crate::common::*;
use std::mem;
/// Construct a `Tree` from a symbolic expression literal. This macro, and the
/// Tree type, are only used in the Parser unit tests, as a concise notation
/// representing the expected results of parsing a given string.
macro_rules! tree {
{
($($child:tt)*)
} => {
$crate::tree::Tree::List(vec![$(tree!($child),)*])
};
{
$atom:ident
} => {
$crate::tree::Tree::atom(stringify!($atom))
};
{
$atom:literal
} => {
$crate::tree::Tree::atom(format!("\"{}\"", $atom))
};
{
#
} => {
$crate::tree::Tree::atom("#")
};
{
+
} => {
$crate::tree::Tree::atom("+")
};
}
/// A `Tree` is either…
#[derive(Debug, PartialEq)]
pub(crate) enum Tree<'text> {
/// …an atom containing text, or…
Atom(Cow<'text, str>),
/// …a list containing zero or more `Tree`s.
List(Vec<Tree<'text>>),
}
impl<'text> Tree<'text> {
/// Construct an Atom from a text scalar
pub(crate) fn atom(text: impl Into<Cow<'text, str>>) -> Tree<'text> {
Tree::Atom(text.into())
}
/// Construct a List from an iterable of trees
pub(crate) fn list(children: impl IntoIterator<Item = Tree<'text>>) -> Tree<'text> {
Tree::List(children.into_iter().collect())
}
/// Convenience function to create an atom containing quoted text
pub(crate) fn string(contents: impl AsRef<str>) -> Tree<'text> {
Tree::atom(format!("\"{}\"", contents.as_ref()))
}
/// Push a child node into self, turning it into a List if it was an Atom
pub(crate) fn push(self, tree: impl Into<Tree<'text>>) -> Tree<'text> {
match self {
Tree::List(mut children) => {
children.push(tree.into());
Tree::List(children)
}
Tree::Atom(text) => Tree::List(vec![Tree::Atom(text), tree.into()]),
}
}
/// Extend a self with a tail of Trees, turning self into a List if it
/// was an Atom
pub(crate) fn extend<I, T>(self, tail: I) -> Tree<'text>
where
I: IntoIterator<Item = T>,
T: Into<Tree<'text>>,
{
// Tree::List(children.into_iter().collect())
let mut head = match self {
Tree::List(children) => children,
Tree::Atom(text) => vec![Tree::Atom(text)],
};
for child in tail {
head.push(child.into());
}
Tree::List(head)
}
/// Like `push`, but modify self in-place
pub(crate) fn push_mut(&mut self, tree: impl Into<Tree<'text>>) {
let tree = mem::replace(self, Tree::List(Vec::new())).push(tree.into());
mem::replace(self, tree);
}
}
impl Display for Tree<'_> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match self {
Tree::List(children) => {
write!(f, "(")?;
for (i, child) in children.iter().enumerate() {
if i > 0 {
write!(f, " ")?;
}
write!(f, "{}", child)?;
}
write!(f, ")")
}
Tree::Atom(text) => write!(f, "{}", text),
}
}
}
impl<'text, T> From<T> for Tree<'text>
where
T: Into<Cow<'text, str>>,
{
fn from(text: T) -> Tree<'text> {
Tree::Atom(text.into())
}
}

View File

@ -1,32 +1,32 @@
use crate::common::*; use crate::common::*;
pub(crate) struct Variables<'a> { pub(crate) struct Variables<'expression, 'src> {
stack: Vec<&'a Expression<'a>>, stack: Vec<&'expression Expression<'src>>,
} }
impl<'a> Variables<'a> { impl<'expression, 'src> Variables<'expression, 'src> {
pub(crate) fn new(root: &'a Expression<'a>) -> Variables<'a> { pub(crate) fn new(root: &'expression Expression<'src>) -> Variables<'expression, 'src> {
Variables { stack: vec![root] } Variables { stack: vec![root] }
} }
} }
impl<'a> Iterator for Variables<'a> { impl<'expression, 'src> Iterator for Variables<'expression, 'src> {
type Item = &'a Token<'a>; type Item = Token<'src>;
fn next(&mut self) -> Option<&'a Token<'a>> { fn next(&mut self) -> Option<Token<'src>> {
match self.stack.pop() { match self.stack.pop() {
None None
| Some(Expression::String { .. }) | Some(Expression::StringLiteral { .. })
| Some(Expression::Backtick { .. }) | Some(Expression::Backtick { .. })
| Some(Expression::Call { .. }) => None, | Some(Expression::Call { .. }) => None,
Some(Expression::Variable { token, .. }) => Some(token), Some(Expression::Variable { name, .. }) => Some(name.token()),
Some(Expression::Concatination { lhs, rhs }) => { Some(Expression::Concatination { lhs, rhs }) => {
self.stack.push(lhs); self.stack.push(lhs);
self.stack.push(rhs); self.stack.push(rhs);
self.next() self.next()
} }
Some(Expression::Group { expression }) => { Some(Expression::Group { contents }) => {
self.stack.push(expression); self.stack.push(contents);
self.next() self.next()
} }
} }

View File

@ -2,13 +2,13 @@ use crate::common::*;
use Warning::*; use Warning::*;
#[derive(Debug)] #[derive(Debug, PartialEq)]
pub(crate) enum Warning<'a> { pub(crate) enum Warning<'src> {
DeprecatedEquals { equals: Token<'a> }, DeprecatedEquals { equals: Token<'src> },
} }
impl Warning<'_> { impl<'src> Warning<'src> {
fn context(&self) -> Option<&Token> { fn context(&self) -> Option<&Token<'src>> {
match self { match self {
DeprecatedEquals { equals } => Some(equals), DeprecatedEquals { equals } => Some(equals),
} }
@ -42,7 +42,7 @@ impl Display for Warning<'_> {
write_message_context( write_message_context(
f, f,
Color::fmt(f).warning(), Color::fmt(f).warning(),
token.text, token.src,
token.offset, token.offset,
token.line, token.line,
token.column, token.column,

View File

@ -1503,7 +1503,7 @@ integration_test! {
justfile: "foo: 'bar'", justfile: "foo: 'bar'",
args: ("foo"), args: ("foo"),
stdout: "", stdout: "",
stderr: "error: Expected name, end of line, or end of file, but found raw string stderr: "error: Expected end of file, end of line, or identifier, but found raw string
| |
1 | foo: 'bar' 1 | foo: 'bar'
| ^^^^^ | ^^^^^
@ -1516,7 +1516,7 @@ integration_test! {
justfile: "foo 'bar'", justfile: "foo 'bar'",
args: ("foo"), args: ("foo"),
stdout: "", stdout: "",
stderr: "error: Expected name, '+', ':', or ':=', but found raw string stderr: "error: Expected ':', ':=', identifier, or '+', but found raw string
| |
1 | foo 'bar' 1 | foo 'bar'
| ^^^^^ | ^^^^^