Use unstable rustfmt configuration options (#592)

Casey Rodarmor 2020-02-10 20:07:06 -08:00 committed by GitHub
parent aceee3e217
commit 3ec7dea4a3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
62 changed files with 569 additions and 550 deletions

.gitattributes

@@ -1 +1 @@
-Cargo.lock linguist-generated diff=nodiff
+* -text


@@ -20,7 +20,7 @@ jobs:
     runs-on: ${{matrix.os}}
     steps:
     - uses: actions/checkout@v1
-    - name: Install
+    - name: Install Main Toolchain
      uses: actions-rs/toolchain@v1
      with:
        toolchain: stable
@@ -42,8 +42,15 @@ jobs:
     - name: Lint
      if: matrix.os != 'windows-latest'
      run: cargo run lint
+    - name: Install Rustfmt Toolchain
+     uses: actions-rs/toolchain@v1
+     with:
+       toolchain: nightly
+       target: ${{ matrix.target }}
+       profile: minimal
+       components: rustfmt
     - name: Format
-     run: cargo fmt --all -- --check
+     run: cargo +nightly fmt --all -- --check
     - name: Completion Scripts
      if: matrix.os != 'windows-latest'
      run: |


@@ -33,6 +33,9 @@ build:
 check:
   cargo check

+fmt:
+  cargo +nightly fmt --all
+
 watch +COMMAND='test':
   cargo watch --clear --exec "{{COMMAND}}"
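With this recipe in place, formatting the whole workspace can go through just itself; a usage sketch, assuming the nightly toolchain described below is installed:

    just fmt    # equivalent to running: cargo +nightly fmt --all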


@@ -1,2 +1,22 @@
-tab_spaces = 2
+comment_width = 70
+edition = "2018"
+error_on_line_overflow = true
+error_on_unformatted = true
+format_code_in_doc_comments = true
+format_macro_bodies = true
+format_strings = true
+match_arm_blocks = false
+match_block_trailing_comma = true
 max_width = 100
+merge_imports = true
+newline_style = "Unix"
+normalize_comments = true
+overflow_delimited_expr = true
+reorder_impl_items = true
+required_version = "1.4.11"
+struct_field_align_threshold = 20
+tab_spaces = 2
+unstable_features = true
+use_field_init_shorthand = true
+use_try_shorthand = true
+wrap_comments = true
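Several of these options, and unstable_features = true itself, are only honored by a nightly rustfmt, which is why the justfile recipe and the CI Format step above call cargo +nightly fmt, and why required_version names rustfmt 1.4.11. A minimal sketch of the local setup, assuming rustup manages the toolchains:

    rustup toolchain install nightly --profile minimal --component rustfmt
    cargo +nightly fmt --all             # rewrite the tree, like `just fmt`
    cargo +nightly fmt --all -- --check  # verify only, like the CI Format step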


@ -34,19 +34,19 @@ impl<'src> Analyzer<'src> {
Item::Alias(alias) => { Item::Alias(alias) => {
self.analyze_alias(&alias)?; self.analyze_alias(&alias)?;
self.aliases.insert(alias); self.aliases.insert(alias);
} },
Item::Assignment(assignment) => { Item::Assignment(assignment) => {
self.analyze_assignment(&assignment)?; self.analyze_assignment(&assignment)?;
self.assignments.insert(assignment); self.assignments.insert(assignment);
} },
Item::Recipe(recipe) => { Item::Recipe(recipe) => {
self.analyze_recipe(&recipe)?; self.analyze_recipe(&recipe)?;
self.recipes.insert(recipe); self.recipes.insert(recipe);
} },
Item::Set(set) => { Item::Set(set) => {
self.analyze_set(&set)?; self.analyze_set(&set)?;
self.sets.insert(set); self.sets.insert(set);
} },
} }
} }
@ -78,7 +78,7 @@ impl<'src> Analyzer<'src> {
Setting::Shell(shell) => { Setting::Shell(shell) => {
assert!(settings.shell.is_none()); assert!(settings.shell.is_none());
settings.shell = Some(shell); settings.shell = Some(shell);
} },
} }
} }


@ -74,19 +74,19 @@ impl<'src: 'run, 'run> AssignmentResolver<'src, 'run> {
} else { } else {
Err(name.token().error(UndefinedVariable { variable })) Err(name.token().error(UndefinedVariable { variable }))
} }
} },
Expression::Call { thunk } => match thunk { Expression::Call { thunk } => match thunk {
Thunk::Nullary { .. } => Ok(()), Thunk::Nullary { .. } => Ok(()),
Thunk::Unary { arg, .. } => self.resolve_expression(arg), Thunk::Unary { arg, .. } => self.resolve_expression(arg),
Thunk::Binary { args: [a, b], .. } => { Thunk::Binary { args: [a, b], .. } => {
self.resolve_expression(a)?; self.resolve_expression(a)?;
self.resolve_expression(b) self.resolve_expression(b)
} },
}, },
Expression::Concatination { lhs, rhs } => { Expression::Concatination { lhs, rhs } => {
self.resolve_expression(lhs)?; self.resolve_expression(lhs)?;
self.resolve_expression(rhs) self.resolve_expression(rhs)
} },
Expression::StringLiteral { .. } | Expression::Backtick { .. } => Ok(()), Expression::StringLiteral { .. } | Expression::Backtick { .. } => Ok(()),
Expression::Group { contents } => self.resolve_expression(contents), Expression::Group { contents } => self.resolve_expression(contents),
} }


@@ -1,7 +1,6 @@
 use crate::common::*;
-use ansi_term::Color::*;
-use ansi_term::{ANSIGenericString, Prefix, Style, Suffix};
+use ansi_term::{ANSIGenericString, Color::*, Prefix, Style, Suffix};
 use atty::Stream;

 #[derive(Copy, Clone, Debug, PartialEq)]


@ -25,8 +25,8 @@ impl Display for CompilationError<'_> {
alias, alias,
recipe_line.ordinal(), recipe_line.ordinal(),
)?; )?;
} },
CircularRecipeDependency { recipe, ref circle } => { CircularRecipeDependency { recipe, ref circle } =>
if circle.len() == 2 { if circle.len() == 2 {
writeln!(f, "Recipe `{}` depends on itself", recipe)?; writeln!(f, "Recipe `{}` depends on itself", recipe)?;
} else { } else {
@ -36,12 +36,11 @@ impl Display for CompilationError<'_> {
recipe, recipe,
circle.join(" -> ") circle.join(" -> ")
)?; )?;
} },
}
CircularVariableDependency { CircularVariableDependency {
variable, variable,
ref circle, ref circle,
} => { } =>
if circle.len() == 2 { if circle.len() == 2 {
writeln!(f, "Variable `{}` is defined in terms of itself", variable)?; writeln!(f, "Variable `{}` is defined in terms of itself", variable)?;
} else { } else {
@ -51,8 +50,7 @@ impl Display for CompilationError<'_> {
variable, variable,
circle.join(" -> ") circle.join(" -> ")
)?; )?;
} },
}
InvalidEscapeSequence { character } => { InvalidEscapeSequence { character } => {
let representation = match character { let representation = match character {
@ -63,23 +61,23 @@ impl Display for CompilationError<'_> {
_ => character.escape_default().collect(), _ => character.escape_default().collect(),
}; };
writeln!(f, "`\\{}` is not a valid escape sequence", representation)?; writeln!(f, "`\\{}` is not a valid escape sequence", representation)?;
} },
DuplicateParameter { recipe, parameter } => { DuplicateParameter { recipe, parameter } => {
writeln!( writeln!(
f, f,
"Recipe `{}` has duplicate parameter `{}`", "Recipe `{}` has duplicate parameter `{}`",
recipe, parameter recipe, parameter
)?; )?;
} },
DuplicateVariable { variable } => { DuplicateVariable { variable } => {
writeln!(f, "Variable `{}` has multiple definitions", variable)?; writeln!(f, "Variable `{}` has multiple definitions", variable)?;
} },
UnexpectedToken { UnexpectedToken {
ref expected, ref expected,
found, found,
} => { } => {
writeln!(f, "Expected {}, but found {}", List::or(expected), found)?; writeln!(f, "Expected {}, but found {}", List::or(expected), found)?;
} },
DuplicateAlias { alias, first } => { DuplicateAlias { alias, first } => {
writeln!( writeln!(
f, f,
@ -88,7 +86,7 @@ impl Display for CompilationError<'_> {
first.ordinal(), first.ordinal(),
self.token.line.ordinal(), self.token.line.ordinal(),
)?; )?;
} },
DuplicateRecipe { recipe, first } => { DuplicateRecipe { recipe, first } => {
writeln!( writeln!(
f, f,
@ -97,7 +95,7 @@ impl Display for CompilationError<'_> {
first.ordinal(), first.ordinal(),
self.token.line.ordinal() self.token.line.ordinal()
)?; )?;
} },
DuplicateSet { setting, first } => { DuplicateSet { setting, first } => {
writeln!( writeln!(
f, f,
@ -106,7 +104,7 @@ impl Display for CompilationError<'_> {
first.ordinal(), first.ordinal(),
self.token.line.ordinal(), self.token.line.ordinal(),
)?; )?;
} },
DependencyArgumentCountMismatch { DependencyArgumentCountMismatch {
dependency, dependency,
found, found,
@ -129,35 +127,35 @@ impl Display for CompilationError<'_> {
} else { } else {
writeln!(f, "at most {} {}", max, Count("argument", max))?; writeln!(f, "at most {} {}", max, Count("argument", max))?;
} }
} },
ParameterShadowsVariable { parameter } => { ParameterShadowsVariable { parameter } => {
writeln!( writeln!(
f, f,
"Parameter `{}` shadows variable of the same name", "Parameter `{}` shadows variable of the same name",
parameter parameter
)?; )?;
} },
RequiredParameterFollowsDefaultParameter { parameter } => { RequiredParameterFollowsDefaultParameter { parameter } => {
writeln!( writeln!(
f, f,
"Non-default parameter `{}` follows default parameter", "Non-default parameter `{}` follows default parameter",
parameter parameter
)?; )?;
} },
ParameterFollowsVariadicParameter { parameter } => { ParameterFollowsVariadicParameter { parameter } => {
writeln!(f, "Parameter `{}` follows variadic parameter", parameter)?; writeln!(f, "Parameter `{}` follows variadic parameter", parameter)?;
} },
MixedLeadingWhitespace { whitespace } => { MixedLeadingWhitespace { whitespace } => {
writeln!( writeln!(
f, f,
"Found a mix of tabs and spaces in leading whitespace: `{}`\n\ "Found a mix of tabs and spaces in leading whitespace: `{}`\nLeading whitespace may \
Leading whitespace may consist of tabs or spaces, but not both", consist of tabs or spaces, but not both",
ShowWhitespace(whitespace) ShowWhitespace(whitespace)
)?; )?;
} },
ExtraLeadingWhitespace => { ExtraLeadingWhitespace => {
writeln!(f, "Recipe line has extra leading whitespace")?; writeln!(f, "Recipe line has extra leading whitespace")?;
} },
FunctionArgumentCountMismatch { FunctionArgumentCountMismatch {
function, function,
found, found,
@ -171,50 +169,50 @@ impl Display for CompilationError<'_> {
Count("argument", found), Count("argument", found),
expected expected
)?; )?;
} },
InconsistentLeadingWhitespace { expected, found } => { InconsistentLeadingWhitespace { expected, found } => {
writeln!( writeln!(
f, f,
"Recipe line has inconsistent leading whitespace. \ "Recipe line has inconsistent leading whitespace. Recipe started with `{}` but found \
Recipe started with `{}` but found line with `{}`", line with `{}`",
ShowWhitespace(expected), ShowWhitespace(expected),
ShowWhitespace(found) ShowWhitespace(found)
)?; )?;
} },
UnknownAliasTarget { alias, target } => { UnknownAliasTarget { alias, target } => {
writeln!(f, "Alias `{}` has an unknown target `{}`", alias, target)?; writeln!(f, "Alias `{}` has an unknown target `{}`", alias, target)?;
} },
UnknownDependency { recipe, unknown } => { UnknownDependency { recipe, unknown } => {
writeln!( writeln!(
f, f,
"Recipe `{}` has unknown dependency `{}`", "Recipe `{}` has unknown dependency `{}`",
recipe, unknown recipe, unknown
)?; )?;
} },
UndefinedVariable { variable } => { UndefinedVariable { variable } => {
writeln!(f, "Variable `{}` not defined", variable)?; writeln!(f, "Variable `{}` not defined", variable)?;
} },
UnknownFunction { function } => { UnknownFunction { function } => {
writeln!(f, "Call to unknown function `{}`", function)?; writeln!(f, "Call to unknown function `{}`", function)?;
} },
UnknownSetting { setting } => { UnknownSetting { setting } => {
writeln!(f, "Unknown setting `{}`", setting)?; writeln!(f, "Unknown setting `{}`", setting)?;
} },
UnknownStartOfToken => { UnknownStartOfToken => {
writeln!(f, "Unknown start of token:")?; writeln!(f, "Unknown start of token:")?;
} },
UnpairedCarriageReturn => { UnpairedCarriageReturn => {
writeln!(f, "Unpaired carriage return")?; writeln!(f, "Unpaired carriage return")?;
} },
UnterminatedInterpolation => { UnterminatedInterpolation => {
writeln!(f, "Unterminated interpolation")?; writeln!(f, "Unterminated interpolation")?;
} },
UnterminatedString => { UnterminatedString => {
writeln!(f, "Unterminated string")?; writeln!(f, "Unterminated string")?;
} },
UnterminatedBacktick => { UnterminatedBacktick => {
writeln!(f, "Unterminated backtick")?; writeln!(f, "Unterminated backtick")?;
} },
Internal { ref message } => { Internal { ref message } => {
writeln!( writeln!(
f, f,
@ -222,7 +220,7 @@ impl Display for CompilationError<'_> {
consider filing an issue: https://github.com/casey/just/issues/new", consider filing an issue: https://github.com/casey/just/issues/new",
message message
)?; )?;
} },
} }
write!(f, "{}", message.suffix())?; write!(f, "{}", message.suffix())?;


@ -279,17 +279,15 @@ impl Config {
match (justfile, working_directory) { match (justfile, working_directory) {
(None, None) => SearchConfig::FromInvocationDirectory, (None, None) => SearchConfig::FromInvocationDirectory,
(Some(justfile), None) => SearchConfig::WithJustfile { justfile }, (Some(justfile), None) => SearchConfig::WithJustfile { justfile },
(Some(justfile), Some(working_directory)) => { (Some(justfile), Some(working_directory)) =>
SearchConfig::WithJustfileAndWorkingDirectory { SearchConfig::WithJustfileAndWorkingDirectory {
justfile, justfile,
working_directory, working_directory,
} },
} (None, Some(_)) =>
(None, Some(_)) => {
return Err(ConfigError::internal( return Err(ConfigError::internal(
"--working-directory set without --justfile", "--working-directory set without --justfile",
)) )),
}
} }
} }
}; };
@ -297,26 +295,26 @@ impl Config {
for subcommand in cmd::ARGLESS { for subcommand in cmd::ARGLESS {
if matches.is_present(subcommand) { if matches.is_present(subcommand) {
match (!overrides.is_empty(), !positional.arguments.is_empty()) { match (!overrides.is_empty(), !positional.arguments.is_empty()) {
(false, false) => {} (false, false) => {},
(true, false) => { (true, false) => {
return Err(ConfigError::SubcommandOverrides { return Err(ConfigError::SubcommandOverrides {
subcommand: format!("--{}", subcommand.to_lowercase()), subcommand: format!("--{}", subcommand.to_lowercase()),
overrides, overrides,
}); });
} },
(false, true) => { (false, true) => {
return Err(ConfigError::SubcommandArguments { return Err(ConfigError::SubcommandArguments {
subcommand: format!("--{}", subcommand.to_lowercase()), subcommand: format!("--{}", subcommand.to_lowercase()),
arguments: positional.arguments, arguments: positional.arguments,
}); });
} },
(true, true) => { (true, true) => {
return Err(ConfigError::SubcommandOverridesAndArguments { return Err(ConfigError::SubcommandOverridesAndArguments {
subcommand: format!("--{}", subcommand.to_lowercase()), subcommand: format!("--{}", subcommand.to_lowercase()),
arguments: positional.arguments, arguments: positional.arguments,
overrides, overrides,
}); });
} },
} }
} }
} }
@ -455,14 +453,13 @@ impl Config {
.status(); .status();
match error { match error {
Ok(status) => { Ok(status) =>
if status.success() { if status.success() {
Ok(()) Ok(())
} else { } else {
eprintln!("Editor `{}` failed: {}", editor.to_string_lossy(), status); eprintln!("Editor `{}` failed: {}", editor.to_string_lossy(), status);
Err(status.code().unwrap_or(EXIT_FAILURE)) Err(status.code().unwrap_or(EXIT_FAILURE))
} },
}
Err(error) => { Err(error) => {
eprintln!( eprintln!(
"Editor `{}` invocation failed: {}", "Editor `{}` invocation failed: {}",
@ -470,7 +467,7 @@ impl Config {
error error
); );
Err(EXIT_FAILURE) Err(EXIT_FAILURE)
} },
} }
} }
@ -555,9 +552,10 @@ impl Config {
} }
} }
// Declaring this outside of the nested loops will probably be more efficient, but // Declaring this outside of the nested loops will probably be more
// it creates all sorts of lifetime issues with variables inside the loops. // efficient, but it creates all sorts of lifetime issues with
// If this is inlined like the docs say, it shouldn't make any difference. // variables inside the loops. If this is inlined like the
// docs say, it shouldn't make any difference.
let print_doc = |doc| { let print_doc = |doc| {
print!( print!(
" {:padding$}{} {}", " {:padding$}{} {}",
@ -643,13 +641,15 @@ mod tests {
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
// This test guards against unintended changes to the argument parser. We should have // This test guards against unintended changes to the argument parser.
// proper tests for all the flags, but this will do for now. // We should have proper tests for all the flags, but this will do
// for now.
#[test] #[test]
fn help() { fn help() {
const EXPECTED_HELP: &str = "just v0.5.8 const EXPECTED_HELP: &str = "just v0.5.8
Casey Rodarmor <casey@rodarmor.com> Casey Rodarmor <casey@rodarmor.com>
🤖 Just a command runner - https://github.com/casey/just 🤖 Just a command runner \
- https://github.com/casey/just
USAGE: USAGE:
just [FLAGS] [OPTIONS] [--] [ARGUMENTS]... just [FLAGS] [OPTIONS] [--] [ARGUMENTS]...
@ -658,8 +658,8 @@ FLAGS:
--clear-shell-args Clear shell arguments --clear-shell-args Clear shell arguments
--dry-run Print what just would do without doing it --dry-run Print what just would do without doing it
--dump Print entire justfile --dump Print entire justfile
-e, --edit \ -e, --edit Edit justfile with editor given by $VISUAL or $EDITOR, falling back \
Edit justfile with editor given by $VISUAL or $EDITOR, falling back to `vim` to `vim`
--evaluate Print evaluated variables --evaluate Print evaluated variables
--highlight Highlight echoed recipe lines in bold --highlight Highlight echoed recipe lines in bold
--init Initialize new justfile in project root --init Initialize new justfile in project root
@ -674,22 +674,22 @@ OPTIONS:
Print colorful output [default: auto] [possible values: auto, always, never] Print colorful output [default: auto] [possible values: auto, always, never]
--completions <SHELL> --completions <SHELL>
Print shell completion script for <SHELL> \ Print shell completion script for <SHELL> [possible values: zsh, bash, fish, \
[possible values: zsh, bash, fish, powershell, elvish] powershell, elvish]
-f, --justfile <JUSTFILE> Use <JUSTFILE> as justfile. -f, --justfile <JUSTFILE> Use <JUSTFILE> as justfile.
--set <VARIABLE> <VALUE> Override <VARIABLE> with <VALUE> --set <VARIABLE> <VALUE> Override <VARIABLE> with <VALUE>
--shell <SHELL> Invoke <SHELL> to run recipes [default: sh] --shell <SHELL> Invoke <SHELL> to run recipes [default: sh]
--shell-arg <SHELL-ARG>... \ --shell-arg <SHELL-ARG>... Invoke shell with <SHELL-ARG> as an argument \
Invoke shell with <SHELL-ARG> as an argument [default: -cu] [default: -cu]
-s, --show <RECIPE> Show information about <RECIPE> -s, --show <RECIPE> Show information about <RECIPE>
-d, --working-directory <WORKING-DIRECTORY> -d, --working-directory <WORKING-DIRECTORY>
Use <WORKING-DIRECTORY> as working directory. --justfile must also be set Use <WORKING-DIRECTORY> as working directory. --justfile must also be set
ARGS: ARGS:
<ARGUMENTS>... \ <ARGUMENTS>... Overrides and recipe(s) to run, defaulting to the first recipe in the \
Overrides and recipe(s) to run, defaulting to the first recipe in the justfile"; justfile";
let app = Config::app().setting(AppSettings::ColorNever); let app = Config::app().setting(AppSettings::ColorNever);
let mut buffer = Vec::new(); let mut buffer = Vec::new();


@ -16,7 +16,7 @@ impl<T, E: Error> ErrorResultExt<T> for Result<T, E> {
} }
Err(error.code()) Err(error.code())
} },
} }
} }
} }

View File

@ -64,7 +64,7 @@ impl<'src, 'run> Evaluator<'src, 'run> {
message: format!("attempted to evaluate undefined variable `{}`", variable), message: format!("attempted to evaluate undefined variable `{}`", variable),
}) })
} }
} },
Expression::Call { thunk } => { Expression::Call { thunk } => {
use Thunk::*; use Thunk::*;
@ -75,12 +75,11 @@ impl<'src, 'run> Evaluator<'src, 'run> {
}; };
match thunk { match thunk {
Nullary { name, function, .. } => { Nullary { name, function, .. } =>
function(&context).map_err(|message| RuntimeError::FunctionCall { function(&context).map_err(|message| RuntimeError::FunctionCall {
function: *name, function: *name,
message, message,
}) }),
}
Unary { Unary {
name, name,
function, function,
@ -107,18 +106,16 @@ impl<'src, 'run> Evaluator<'src, 'run> {
message, message,
}), }),
} }
} },
Expression::StringLiteral { string_literal } => Ok(string_literal.cooked.to_string()), Expression::StringLiteral { string_literal } => Ok(string_literal.cooked.to_string()),
Expression::Backtick { contents, token } => { Expression::Backtick { contents, token } =>
if self.config.dry_run { if self.config.dry_run {
Ok(format!("`{}`", contents)) Ok(format!("`{}`", contents))
} else { } else {
Ok(self.run_backtick(contents, token)?) Ok(self.run_backtick(contents, token)?)
} },
} Expression::Concatination { lhs, rhs } =>
Expression::Concatination { lhs, rhs } => { Ok(self.evaluate_expression(lhs)? + &self.evaluate_expression(rhs)?),
Ok(self.evaluate_expression(lhs)? + &self.evaluate_expression(rhs)?)
}
Expression::Group { contents } => self.evaluate_expression(contents), Expression::Group { contents } => self.evaluate_expression(contents),
} }
} }
@ -155,7 +152,7 @@ impl<'src, 'run> Evaluator<'src, 'run> {
Fragment::Text { token } => evaluated += token.lexeme(), Fragment::Text { token } => evaluated += token.lexeme(),
Fragment::Interpolation { expression } => { Fragment::Interpolation { expression } => {
evaluated += &self.evaluate_expression(expression)?; evaluated += &self.evaluate_expression(expression)?;
} },
} }
} }
Ok(evaluated) Ok(evaluated)


@ -18,14 +18,11 @@ impl InterruptHandler {
match INSTANCE.lock() { match INSTANCE.lock() {
Ok(guard) => guard, Ok(guard) => guard,
Err(poison_error) => { Err(poison_error) => {
eprintln!( eprintln!("{}", RuntimeError::Internal {
"{}",
RuntimeError::Internal {
message: format!("interrupt handler mutex poisoned: {}", poison_error), message: format!("interrupt handler mutex poisoned: {}", poison_error),
} });
);
std::process::exit(EXIT_FAILURE); std::process::exit(EXIT_FAILURE);
} },
} }
} }
@ -56,13 +53,9 @@ impl InterruptHandler {
pub(crate) fn unblock(&mut self) { pub(crate) fn unblock(&mut self) {
if self.blocks == 0 { if self.blocks == 0 {
eprintln!( eprintln!("{}", RuntimeError::Internal {
"{}", message: "attempted to unblock interrupt handler, but handler was not blocked".to_string(),
RuntimeError::Internal { });
message: "attempted to unblock interrupt handler, but handler was not blocked"
.to_string(),
}
);
std::process::exit(EXIT_FAILURE); std::process::exit(EXIT_FAILURE);
} }


@ -8,9 +8,9 @@ use TokenKind::*;
/// The lexer proceeds character-by-character, as opposed to using /// The lexer proceeds character-by-character, as opposed to using
/// regular expressions to lex tokens or semi-tokens at a time. As a /// regular expressions to lex tokens or semi-tokens at a time. As a
/// result, it is verbose and straightforward. Just used to have a /// result, it is verbose and straightforward. Just used to have a
/// regex-based lexer, which was slower and generally godawful. However, /// regex-based lexer, which was slower and generally godawful.
/// this should not be taken as a slight against regular expressions, /// However, this should not be taken as a slight against regular
/// the lexer was just idiosyncratically bad. /// expressions, the lexer was just idiosyncratically bad.
pub(crate) struct Lexer<'src> { pub(crate) struct Lexer<'src> {
/// Source text /// Source text
src: &'src str, src: &'src str,
@ -83,7 +83,7 @@ impl<'src> Lexer<'src> {
self.next = self.chars.next(); self.next = self.chars.next();
Ok(()) Ok(())
} },
None => Err(self.internal_error("Lexer advanced past end of text")), None => Err(self.internal_error("Lexer advanced past end of text")),
} }
} }
@ -177,7 +177,8 @@ impl<'src> Lexer<'src> {
fn error(&self, kind: CompilationErrorKind<'src>) -> CompilationError<'src> { fn error(&self, kind: CompilationErrorKind<'src>) -> CompilationError<'src> {
// Use the in-progress token span as the location of the error. // Use the in-progress token span as the location of the error.
// The width of the error site to highlight depends on the kind of error: // The width of the error site to highlight depends on the kind of
// error:
let length = match kind { let length = match kind {
// highlight ' or " // highlight ' or "
UnterminatedString => 1, UnterminatedString => 1,
@ -262,7 +263,7 @@ impl<'src> Lexer<'src> {
} else { } else {
self.lex_normal(first)? self.lex_normal(first)?
}; };
} },
None => break, None => break,
} }
} }
@ -369,7 +370,7 @@ impl<'src> Lexer<'src> {
}; };
Ok(()) Ok(())
} },
Continue => { Continue => {
if !self.indentation().is_empty() { if !self.indentation().is_empty() {
for _ in self.indentation().chars() { for _ in self.indentation().chars() {
@ -380,7 +381,7 @@ impl<'src> Lexer<'src> {
} }
Ok(()) Ok(())
} },
Decrease => { Decrease => {
while self.indentation() != whitespace { while self.indentation() != whitespace {
self.lex_dedent(); self.lex_dedent();
@ -395,14 +396,14 @@ impl<'src> Lexer<'src> {
} }
Ok(()) Ok(())
} },
Mixed { whitespace } => { Mixed { whitespace } => {
for _ in whitespace.chars() { for _ in whitespace.chars() {
self.advance()?; self.advance()?;
} }
Err(self.error(MixedLeadingWhitespace { whitespace })) Err(self.error(MixedLeadingWhitespace { whitespace }))
} },
Inconsistent => { Inconsistent => {
for _ in whitespace.chars() { for _ in whitespace.chars() {
self.advance()?; self.advance()?;
@ -412,7 +413,7 @@ impl<'src> Lexer<'src> {
expected: self.indentation(), expected: self.indentation(),
found: whitespace, found: whitespace,
})) }))
} },
Increase => { Increase => {
while self.next_is_whitespace() { while self.next_is_whitespace() {
self.advance()?; self.advance()?;
@ -429,7 +430,7 @@ impl<'src> Lexer<'src> {
} }
Ok(()) Ok(())
} },
} }
} }
@ -454,14 +455,13 @@ impl<'src> Lexer<'src> {
' ' | '\t' => self.lex_whitespace(), ' ' | '\t' => self.lex_whitespace(),
'\'' => self.lex_raw_string(), '\'' => self.lex_raw_string(),
'"' => self.lex_cooked_string(), '"' => self.lex_cooked_string(),
_ => { _ =>
if Self::is_identifier_start(start) { if Self::is_identifier_start(start) {
self.lex_identifier() self.lex_identifier()
} else { } else {
self.advance()?; self.advance()?;
Err(self.error(UnknownStartOfToken)) Err(self.error(UnknownStartOfToken))
} },
}
} }
} }
@ -478,7 +478,8 @@ impl<'src> Lexer<'src> {
// Emit interpolation end token // Emit interpolation end token
self.lex_double(InterpolationEnd) self.lex_double(InterpolationEnd)
} else if self.at_eol_or_eof() { } else if self.at_eol_or_eof() {
// Return unterminated interpolation error that highlights the opening {{ // Return unterminated interpolation error that highlights the opening
// {{
Err(Self::unterminated_interpolation_error(interpolation_start)) Err(Self::unterminated_interpolation_error(interpolation_start))
} else { } else {
// Otherwise lex as per normal // Otherwise lex as per normal
@ -529,7 +530,7 @@ impl<'src> Lexer<'src> {
self.lex_double(InterpolationStart)?; self.lex_double(InterpolationStart)?;
self.interpolation_start = Some(self.tokens[self.tokens.len() - 1]); self.interpolation_start = Some(self.tokens[self.tokens.len() - 1]);
Ok(()) Ok(())
} },
EndOfFile => Ok(()), EndOfFile => Ok(()),
} }
} }
@ -677,7 +678,7 @@ impl<'src> Lexer<'src> {
match self.next { match self.next {
Some('\'') => break, Some('\'') => break,
None => return Err(self.error(UnterminatedString)), None => return Err(self.error(UnterminatedString)),
_ => {} _ => {},
} }
self.advance()?; self.advance()?;
@ -826,9 +827,8 @@ mod tests {
Dedent | Eof => "", Dedent | Eof => "",
// Variable lexemes // Variable lexemes
Text | StringCooked | StringRaw | Identifier | Comment | Backtick | Unspecified => { Text | StringCooked | StringRaw | Identifier | Comment | Backtick | Unspecified =>
panic!("Token {:?} has no default lexeme", kind) panic!("Token {:?} has no default lexeme", kind),
}
} }
} }
@ -872,7 +872,7 @@ mod tests {
kind, kind,
}; };
assert_eq!(have, want); assert_eq!(have, want);
} },
} }
} }


@ -68,11 +68,11 @@ impl<T: Display, I: Iterator<Item = T> + Clone> Display for List<T, I> {
write!(f, ", {}", c)?; write!(f, ", {}", c)?;
current = Some(n); current = Some(n);
next = values.next(); next = values.next();
} },
(Some(c), None) => { (Some(c), None) => {
write!(f, ", {} {}", self.conjunction, c)?; write!(f, ", {} {}", self.conjunction, c)?;
return Ok(()); return Ok(());
} },
_ => unreachable!("Iterator was fused, but returned Some after None"), _ => unreachable!("Iterator was fused, but returned Some after None"),
} }
} }


@ -8,13 +8,12 @@ pub(crate) fn load_dotenv() -> RunResult<'static, BTreeMap<String, String>> {
Ok(iter) => { Ok(iter) => {
let result: dotenv::Result<BTreeMap<String, String>> = iter.collect(); let result: dotenv::Result<BTreeMap<String, String>> = iter.collect();
result.map_err(|dotenv_error| RuntimeError::Dotenv { dotenv_error }) result.map_err(|dotenv_error| RuntimeError::Dotenv { dotenv_error })
} },
Err(dotenv_error) => { Err(dotenv_error) =>
if dotenv_error.not_found() { if dotenv_error.not_found() {
Ok(BTreeMap::new()) Ok(BTreeMap::new())
} else { } else {
Err(RuntimeError::Dotenv { dotenv_error }) Err(RuntimeError::Dotenv { dotenv_error })
} },
}
} }
} }


@ -1,12 +1,13 @@
use crate::common::*; use crate::common::*;
/// A module, the top-level type produced by the parser. So-named because /// A module, the top-level type produced by the parser. So-named
/// although at present, all justfiles consist of a single module, in the /// because although at present, all justfiles consist of a single
/// future we will likely have multi-module and multi-file justfiles. /// module, in the future we will likely have multi-module and
/// multi-file justfiles.
/// ///
/// Not all successful parses result in valid justfiles, so additional /// Not all successful parses result in valid justfiles, so additional
/// consistency checks and name resolution are performed by the `Analyzer`, /// consistency checks and name resolution are performed by the
/// which produces a `Justfile` from a `Module`. /// `Analyzer`, which produces a `Justfile` from a `Module`.
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct Module<'src> { pub(crate) struct Module<'src> {
/// Items in the justfile /// Items in the justfile


@ -1,7 +1,7 @@
use crate::common::*; use crate::common::*;
/// A name. This is effectively just a `Token` of kind `Identifier`, but we /// A name. This is effectively just a `Token` of kind `Identifier`,
/// give it its own type for clarity. /// but we give it its own type for clarity.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd)]
pub(crate) struct Name<'src> { pub(crate) struct Name<'src> {
pub(crate) offset: usize, pub(crate) offset: usize,


@ -1,9 +1,10 @@
use crate::common::*; use crate::common::*;
/// Methods commmon to all AST nodes. Currently only used in parser unit tests. /// Methods commmon to all AST nodes. Currently only used in parser
/// unit tests.
pub(crate) trait Node<'src> { pub(crate) trait Node<'src> {
/// Construct an untyped tree of atoms representing this Node. This function, /// Construct an untyped tree of atoms representing this Node. This
/// and `Tree` type, are only used in parser unit tests. /// function, and `Tree` type, are only used in parser unit tests.
fn tree(&self) -> Tree<'src>; fn tree(&self) -> Tree<'src>;
} }
@ -59,18 +60,18 @@ impl<'src> Node<'src> for Expression<'src> {
Unary { name, arg, .. } => { Unary { name, arg, .. } => {
tree.push_mut(name.lexeme()); tree.push_mut(name.lexeme());
tree.push_mut(arg.tree()); tree.push_mut(arg.tree());
} },
Binary { Binary {
name, args: [a, b], .. name, args: [a, b], ..
} => { } => {
tree.push_mut(name.lexeme()); tree.push_mut(name.lexeme());
tree.push_mut(a.tree()); tree.push_mut(a.tree());
tree.push_mut(b.tree()); tree.push_mut(b.tree());
} },
} }
tree tree
} },
Expression::Variable { name } => Tree::atom(name.lexeme()), Expression::Variable { name } => Tree::atom(name.lexeme()),
Expression::StringLiteral { Expression::StringLiteral {
string_literal: StringLiteral { cooked, .. }, string_literal: StringLiteral { cooked, .. },
@ -175,7 +176,7 @@ impl<'src> Node<'src> for Set<'src> {
for argument in arguments { for argument in arguments {
set.push_mut(Tree::string(&argument.cooked)); set.push_mut(Tree::string(&argument.cooked));
} }
} },
} }
set set


@ -28,7 +28,7 @@ pub(crate) fn output(mut command: Command) -> Result<String, OutputError> {
.to_string(), .to_string(),
), ),
} }
} },
Err(io_error) => Err(OutputError::Io(io_error)), Err(io_error) => Err(OutputError::Io(io_error)),
} }
} }


@ -4,20 +4,24 @@ use TokenKind::*;
/// Just language parser /// Just language parser
/// ///
/// The parser is a (hopefully) straightforward recursive descent parser. /// The parser is a (hopefully) straightforward recursive descent
/// parser.
/// ///
/// It uses a few tokens of lookahead to disambiguate different constructs. /// It uses a few tokens of lookahead to disambiguate different
/// constructs.
/// ///
/// The `expect_*` and `presume_`* methods are similar in that they assert /// The `expect_*` and `presume_`* methods are similar in that they
/// the type of unparsed tokens and consume them. However, upon encountering /// assert the type of unparsed tokens and consume them. However, upon
/// an unexpected token, the `expect_*` methods return an unexpected token /// encountering an unexpected token, the `expect_*` methods return an
/// error, whereas the `presume_*` tokens return an internal error. /// unexpected token error, whereas the `presume_*` tokens return an
/// internal error.
/// ///
/// The `presume_*` methods are used when the token stream has been inspected /// The `presume_*` methods are used when the token stream has been
/// in some other way, and thus encountering an unexpected token is a bug in /// inspected in some other way, and thus encountering an unexpected
/// Just, and not a syntax error. /// token is a bug in Just, and not a syntax error.
/// ///
/// All methods starting with `parse_*` parse and return a language construct. /// All methods starting with `parse_*` parse and return a language
/// construct.
pub(crate) struct Parser<'tokens, 'src> { pub(crate) struct Parser<'tokens, 'src> {
/// Source tokens /// Source tokens
tokens: &'tokens [Token<'src>], tokens: &'tokens [Token<'src>],
@ -43,7 +47,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
Ok(self.next()?.error(kind)) Ok(self.next()?.error(kind))
} }
/// Construct an unexpected token error with the token returned by `Parser::next` /// Construct an unexpected token error with the token returned by
/// `Parser::next`
fn unexpected_token( fn unexpected_token(
&self, &self,
expected: &[TokenKind], expected: &[TokenKind],
@ -93,11 +98,10 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
let mut rest = self.rest(); let mut rest = self.rest();
for kind in kinds { for kind in kinds {
match rest.next() { match rest.next() {
Some(token) => { Some(token) =>
if token.kind != *kind { if token.kind != *kind {
return false; return false;
} },
}
None => return false, None => return false,
} }
} }
@ -125,8 +129,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
Err(self.internal_error("`Parser::advance()` advanced past end of token stream")?) Err(self.internal_error("`Parser::advance()` advanced past end of token stream")?)
} }
/// Return the next token if it is of kind `expected`, otherwise, return an /// Return the next token if it is of kind `expected`, otherwise,
/// unexpected token error /// return an unexpected token error
fn expect(&mut self, expected: TokenKind) -> CompilationResult<'src, Token<'src>> { fn expect(&mut self, expected: TokenKind) -> CompilationResult<'src, Token<'src>> {
if let Some(token) = self.accept(expected)? { if let Some(token) = self.accept(expected)? {
Ok(token) Ok(token)
@ -157,8 +161,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
self.expect(Eol).map(|_| ()).expected(&[Eof]) self.expect(Eol).map(|_| ()).expected(&[Eof])
} }
/// Return an internal error if the next token is not of kind `Identifier` with /// Return an internal error if the next token is not of kind
/// lexeme `lexeme`. /// `Identifier` with lexeme `lexeme`.
fn presume_name(&mut self, lexeme: &str) -> CompilationResult<'src, ()> { fn presume_name(&mut self, lexeme: &str) -> CompilationResult<'src, ()> {
let next = self.advance()?; let next = self.advance()?;
@ -178,7 +182,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
} }
} }
/// Return an internal error if the next token is not of kind `kind`. /// Return an internal error if the next token is not of kind
/// `kind`.
fn presume(&mut self, kind: TokenKind) -> CompilationResult<'src, Token<'src>> { fn presume(&mut self, kind: TokenKind) -> CompilationResult<'src, Token<'src>> {
let next = self.advance()?; let next = self.advance()?;
@ -192,7 +197,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
} }
} }
/// Return an internal error if the next token is not one of kinds `kinds`. /// Return an internal error if the next token is not one of kinds
/// `kinds`.
fn presume_any(&mut self, kinds: &[TokenKind]) -> CompilationResult<'src, Token<'src>> { fn presume_any(&mut self, kinds: &[TokenKind]) -> CompilationResult<'src, Token<'src>> {
let next = self.advance()?; let next = self.advance()?;
if !kinds.contains(&next.kind) { if !kinds.contains(&next.kind) {
@ -267,16 +273,16 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
Comment => { Comment => {
doc = Some(next.lexeme()[1..].trim()); doc = Some(next.lexeme()[1..].trim());
self.expect_eol()?; self.expect_eol()?;
} },
Eol => { Eol => {
self.advance()?; self.advance()?;
} },
Eof => { Eof => {
self.advance()?; self.advance()?;
break; break;
} },
Identifier => match next.lexeme() { Identifier => match next.lexeme() {
keyword::ALIAS => { keyword::ALIAS =>
if self.next_are(&[Identifier, Identifier, Equals]) { if self.next_are(&[Identifier, Identifier, Equals]) {
warnings.push(Warning::DeprecatedEquals { warnings.push(Warning::DeprecatedEquals {
equals: self.get(2)?, equals: self.get(2)?,
@ -286,9 +292,8 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
items.push(Item::Alias(self.parse_alias()?)); items.push(Item::Alias(self.parse_alias()?));
} else { } else {
items.push(Item::Recipe(self.parse_recipe(doc, false)?)); items.push(Item::Recipe(self.parse_recipe(doc, false)?));
} },
} keyword::EXPORT =>
keyword::EXPORT => {
if self.next_are(&[Identifier, Identifier, Equals]) { if self.next_are(&[Identifier, Identifier, Equals]) {
warnings.push(Warning::DeprecatedEquals { warnings.push(Warning::DeprecatedEquals {
equals: self.get(2)?, equals: self.get(2)?,
@ -300,16 +305,14 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
items.push(Item::Assignment(self.parse_assignment(true)?)); items.push(Item::Assignment(self.parse_assignment(true)?));
} else { } else {
items.push(Item::Recipe(self.parse_recipe(doc, false)?)); items.push(Item::Recipe(self.parse_recipe(doc, false)?));
} },
} keyword::SET =>
keyword::SET => {
if self.next_are(&[Identifier, Identifier, ColonEquals]) { if self.next_are(&[Identifier, Identifier, ColonEquals]) {
items.push(Item::Set(self.parse_set()?)); items.push(Item::Set(self.parse_set()?));
} else { } else {
items.push(Item::Recipe(self.parse_recipe(doc, false)?)); items.push(Item::Recipe(self.parse_recipe(doc, false)?));
} },
} _ =>
_ => {
if self.next_are(&[Identifier, Equals]) { if self.next_are(&[Identifier, Equals]) {
warnings.push(Warning::DeprecatedEquals { warnings.push(Warning::DeprecatedEquals {
equals: self.get(1)?, equals: self.get(1)?,
@ -319,16 +322,15 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
items.push(Item::Assignment(self.parse_assignment(false)?)); items.push(Item::Assignment(self.parse_assignment(false)?));
} else { } else {
items.push(Item::Recipe(self.parse_recipe(doc, false)?)); items.push(Item::Recipe(self.parse_recipe(doc, false)?));
} },
}
}, },
At => { At => {
self.presume(At)?; self.presume(At)?;
items.push(Item::Recipe(self.parse_recipe(doc, true)?)); items.push(Item::Recipe(self.parse_recipe(doc, true)?));
} },
_ => { _ => {
return Err(self.unexpected_token(&[Identifier, At])?); return Err(self.unexpected_token(&[Identifier, At])?);
} },
} }
if next.kind != Comment { if next.kind != Comment {
@ -394,7 +396,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
let contents = &next.lexeme()[1..next.lexeme().len() - 1]; let contents = &next.lexeme()[1..next.lexeme().len() - 1];
let token = self.advance()?; let token = self.advance()?;
Ok(Expression::Backtick { contents, token }) Ok(Expression::Backtick { contents, token })
} },
Identifier => { Identifier => {
let name = self.parse_name()?; let name = self.parse_name()?;
@ -406,13 +408,13 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
} else { } else {
Ok(Expression::Variable { name }) Ok(Expression::Variable { name })
} }
} },
ParenL => { ParenL => {
self.presume(ParenL)?; self.presume(ParenL)?;
let contents = Box::new(self.parse_expression()?); let contents = Box::new(self.parse_expression()?);
self.expect(ParenR)?; self.expect(ParenR)?;
Ok(Expression::Group { contents }) Ok(Expression::Group { contents })
} },
_ => Err(self.unexpected_token(&[StringCooked, StringRaw, Backtick, Identifier, ParenL])?), _ => Err(self.unexpected_token(&[StringCooked, StringRaw, Backtick, Identifier, ParenL])?),
} }
} }
@ -443,7 +445,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
return Err( return Err(
token.error(CompilationErrorKind::InvalidEscapeSequence { character: other }), token.error(CompilationErrorKind::InvalidEscapeSequence { character: other }),
); );
} },
} }
escape = false; escape = false;
} else if c == '\\' { } else if c == '\\' {
@ -456,7 +458,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
raw, raw,
cooked: Cow::Owned(cooked), cooked: Cow::Owned(cooked),
}) })
} },
_ => Err(token.error(CompilationErrorKind::Internal { _ => Err(token.error(CompilationErrorKind::Internal {
message: "`Parser::parse_string_literal` called on non-string token".to_string(), message: "`Parser::parse_string_literal` called on non-string token".to_string(),
})), })),
@ -656,7 +658,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
value: Setting::Shell(setting::Shell { command, arguments }), value: Setting::Shell(setting::Shell { command, arguments }),
name, name,
}) })
} },
_ => Err(name.error(CompilationErrorKind::UnknownSetting { _ => Err(name.error(CompilationErrorKind::UnknownSetting {
setting: name.lexeme(), setting: name.lexeme(),
})), })),
@ -743,7 +745,7 @@ mod tests {
kind, kind,
}; };
assert_eq!(have, want); assert_eq!(have, want);
} },
} }
} }


@ -71,8 +71,8 @@ impl PlatformInterface for Platform {
} }
fn set_execute_permission(_path: &Path) -> Result<(), io::Error> { fn set_execute_permission(_path: &Path) -> Result<(), io::Error> {
// it is not necessary to set an execute permission on a script on windows, // it is not necessary to set an execute permission on a script on
// so this is a nop // windows, so this is a nop
Ok(()) Ok(())
} }


@ -1,8 +1,8 @@
use crate::common::*; use crate::common::*;
pub(crate) trait PlatformInterface { pub(crate) trait PlatformInterface {
/// Construct a command equivalent to running the script at `path` with the /// Construct a command equivalent to running the script at `path`
/// shebang line `shebang` /// with the shebang line `shebang`
fn make_shebang_command( fn make_shebang_command(
path: &Path, path: &Path,
working_directory: &Path, working_directory: &Path,
@ -13,9 +13,11 @@ pub(crate) trait PlatformInterface {
/// Set the execute permission on the file pointed to by `path` /// Set the execute permission on the file pointed to by `path`
fn set_execute_permission(path: &Path) -> Result<(), io::Error>; fn set_execute_permission(path: &Path) -> Result<(), io::Error>;
/// Extract the signal from a process exit status, if it was terminated by a signal /// Extract the signal from a process exit status, if it was
/// terminated by a signal
fn signal_from_exit_status(exit_status: process::ExitStatus) -> Option<i32>; fn signal_from_exit_status(exit_status: process::ExitStatus) -> Option<i32>;
/// Translate a path from a "native" path to a path the interpreter expects /// Translate a path from a "native" path to a path the interpreter
/// expects
fn to_shell_path(working_directory: &Path, path: &Path) -> Result<String, String>; fn to_shell_path(working_directory: &Path, path: &Path) -> Result<String, String>;
} }


@ -10,8 +10,9 @@ use crate::common::*;
/// ///
/// - Overrides are of the form `NAME=.*` /// - Overrides are of the form `NAME=.*`
/// ///
/// - After overrides comes a single optional search directory argument. /// - After overrides comes a single optional search directory
/// This is either '.', '..', or an argument that contains a `/`. /// argument. This is either '.', '..', or an argument that contains
/// a `/`.
/// ///
/// If the argument contains a `/`, everything before and including /// If the argument contains a `/`, everything before and including
/// the slash is the search directory, and everything after is added /// the slash is the search directory, and everything after is added
@ -23,12 +24,12 @@ use crate::common::*;
/// being invoked and are a convenient way to override settings. /// being invoked and are a convenient way to override settings.
/// ///
/// For modes that do not take other arguments, the search directory /// For modes that do not take other arguments, the search directory
/// argument determines where to begin searching for the justfile. This /// argument determines where to begin searching for the justfile.
/// allows command lines like `just -l ..` and `just ../build` to find /// This allows command lines like `just -l ..` and `just ../build` to
/// the same justfile. /// find the same justfile.
/// ///
/// For modes that do take other arguments, the search argument is simply /// For modes that do take other arguments, the search argument is
/// prepended to rest. /// simply prepended to rest.
#[cfg_attr(test, derive(PartialEq, Debug))] #[cfg_attr(test, derive(PartialEq, Debug))]
pub struct Positional { pub struct Positional {
/// Overrides from values of the form `[a-zA-Z_][a-zA-Z0-9_-]*=.*` /// Overrides from values of the form `[a-zA-Z_][a-zA-Z0-9_-]*=.*`


@ -2,8 +2,8 @@ use crate::common::*;
use std::process::{ExitStatus, Stdio}; use std::process::{ExitStatus, Stdio};
/// Return a `RuntimeError::Signal` if the process was terminated by a signal, /// Return a `RuntimeError::Signal` if the process was terminated by a
/// otherwise return an `RuntimeError::UnknownFailure` /// signal, otherwise return an `RuntimeError::UnknownFailure`
fn error_from_signal( fn error_from_signal(
recipe: &str, recipe: &str,
line_number: Option<usize>, line_number: Option<usize>,
@ -177,7 +177,7 @@ impl<'src, D> Recipe<'src, D> {
// run it! // run it!
match InterruptHandler::guard(|| command.status()) { match InterruptHandler::guard(|| command.status()) {
Ok(exit_status) => { Ok(exit_status) =>
if let Some(code) = exit_status.code() { if let Some(code) = exit_status.code() {
if code != 0 { if code != 0 {
return Err(RuntimeError::Code { return Err(RuntimeError::Code {
@ -188,8 +188,7 @@ impl<'src, D> Recipe<'src, D> {
} }
} else { } else {
return Err(error_from_signal(self.name(), None, exit_status)); return Err(error_from_signal(self.name(), None, exit_status));
} },
}
Err(io_error) => { Err(io_error) => {
return Err(RuntimeError::Shebang { return Err(RuntimeError::Shebang {
recipe: self.name(), recipe: self.name(),
@ -197,7 +196,7 @@ impl<'src, D> Recipe<'src, D> {
argument: argument.map(String::from), argument: argument.map(String::from),
io_error, io_error,
}); });
} },
}; };
} else { } else {
let mut lines = self.body.iter().peekable(); let mut lines = self.body.iter().peekable();
@ -260,7 +259,7 @@ impl<'src, D> Recipe<'src, D> {
cmd.export(dotenv, &scope); cmd.export(dotenv, &scope);
match InterruptHandler::guard(|| cmd.status()) { match InterruptHandler::guard(|| cmd.status()) {
Ok(exit_status) => { Ok(exit_status) =>
if let Some(code) = exit_status.code() { if let Some(code) = exit_status.code() {
if code != 0 { if code != 0 {
return Err(RuntimeError::Code { return Err(RuntimeError::Code {
@ -275,14 +274,13 @@ impl<'src, D> Recipe<'src, D> {
Some(line_number), Some(line_number),
exit_status, exit_status,
)); ));
} },
}
Err(io_error) => { Err(io_error) => {
return Err(RuntimeError::IoError { return Err(RuntimeError::IoError {
recipe: self.name(), recipe: self.name(),
io_error, io_error,
}); });
} },
}; };
} }
} }


@ -119,7 +119,7 @@ impl<'src> Display for RuntimeError<'src> {
if let Some(suggestion) = *suggestion { if let Some(suggestion) = *suggestion {
write!(f, "\nDid you mean `{}`?", suggestion)?; write!(f, "\nDid you mean `{}`?", suggestion)?;
} }
} },
UnknownOverrides { overrides } => { UnknownOverrides { overrides } => {
write!( write!(
f, f,
@ -127,7 +127,7 @@ impl<'src> Display for RuntimeError<'src> {
Count("Variable", overrides.len()), Count("Variable", overrides.len()),
List::and_ticked(overrides), List::and_ticked(overrides),
)?; )?;
} },
ArgumentCountMismatch { ArgumentCountMismatch {
recipe, recipe,
parameters, parameters,
@ -173,12 +173,12 @@ impl<'src> Display for RuntimeError<'src> {
write!(f, " {}", param)?; write!(f, " {}", param)?;
} }
} }
} },
Code { Code {
recipe, recipe,
line_number, line_number,
code, code,
} => { } =>
if let Some(n) = line_number { if let Some(n) = line_number {
write!( write!(
f, f,
@ -187,8 +187,7 @@ impl<'src> Display for RuntimeError<'src> {
)?; )?;
} else { } else {
write!(f, "Recipe `{}` failed with exit code {}", recipe, code)?; write!(f, "Recipe `{}` failed with exit code {}", recipe, code)?;
} },
}
Cygpath { Cygpath {
recipe, recipe,
output_error, output_error,
@ -196,56 +195,56 @@ impl<'src> Display for RuntimeError<'src> {
OutputError::Code(code) => { OutputError::Code(code) => {
write!( write!(
f, f,
"Cygpath failed with exit code {} while translating recipe `{}` \ "Cygpath failed with exit code {} while translating recipe `{}` shebang interpreter \
shebang interpreter path", path",
code, recipe code, recipe
)?; )?;
} },
OutputError::Signal(signal) => { OutputError::Signal(signal) => {
write!( write!(
f, f,
"Cygpath terminated by signal {} while translating recipe `{}` \ "Cygpath terminated by signal {} while translating recipe `{}` shebang interpreter \
shebang interpreter path", path",
signal, recipe signal, recipe
)?; )?;
} },
OutputError::Unknown => { OutputError::Unknown => {
write!( write!(
f, f,
"Cygpath experienced an unknown failure while translating recipe `{}` \ "Cygpath experienced an unknown failure while translating recipe `{}` shebang \
shebang interpreter path", interpreter path",
recipe recipe
)?; )?;
} },
OutputError::Io(io_error) => { OutputError::Io(io_error) => {
match io_error.kind() { match io_error.kind() {
io::ErrorKind::NotFound => write!( io::ErrorKind::NotFound => write!(
f, f,
"Could not find `cygpath` executable to translate recipe `{}` \ "Could not find `cygpath` executable to translate recipe `{}` shebang interpreter \
shebang interpreter path:\n{}", path:\n{}",
recipe, io_error recipe, io_error
), ),
io::ErrorKind::PermissionDenied => write!( io::ErrorKind::PermissionDenied => write!(
f, f,
"Could not run `cygpath` executable to translate recipe `{}` \ "Could not run `cygpath` executable to translate recipe `{}` shebang interpreter \
shebang interpreter path:\n{}", path:\n{}",
recipe, io_error recipe, io_error
), ),
_ => write!(f, "Could not run `cygpath` executable:\n{}", io_error), _ => write!(f, "Could not run `cygpath` executable:\n{}", io_error),
}?; }?;
} },
OutputError::Utf8(utf8_error) => { OutputError::Utf8(utf8_error) => {
write!( write!(
f, f,
"Cygpath successfully translated recipe `{}` shebang interpreter path, \ "Cygpath successfully translated recipe `{}` shebang interpreter path, but output was \
but output was not utf8: {}", not utf8: {}",
recipe, utf8_error recipe, utf8_error
)?; )?;
} },
}, },
Dotenv { dotenv_error } => { Dotenv { dotenv_error } => {
writeln!(f, "Failed to load .env: {}", dotenv_error)?; writeln!(f, "Failed to load .env: {}", dotenv_error)?;
} },
FunctionCall { function, message } => { FunctionCall { function, message } => {
writeln!( writeln!(
f, f,
@ -253,13 +252,13 @@ impl<'src> Display for RuntimeError<'src> {
function.lexeme(), function.lexeme(),
message message
)?; )?;
} },
Shebang { Shebang {
recipe, recipe,
command, command,
argument, argument,
io_error, io_error,
} => { } =>
if let Some(argument) = argument { if let Some(argument) = argument {
write!( write!(
f, f,
@ -272,13 +271,12 @@ impl<'src> Display for RuntimeError<'src> {
"Recipe `{}` with shebang `#!{}` execution error: {}", "Recipe `{}` with shebang `#!{}` execution error: {}",
recipe, command, io_error recipe, command, io_error
)?; )?;
} },
}
Signal { Signal {
recipe, recipe,
line_number, line_number,
signal, signal,
} => { } =>
if let Some(n) = line_number { if let Some(n) = line_number {
write!( write!(
f, f,
@ -287,12 +285,11 @@ impl<'src> Display for RuntimeError<'src> {
)?; )?;
} else { } else {
write!(f, "Recipe `{}` was terminated by signal {}", recipe, signal)?; write!(f, "Recipe `{}` was terminated by signal {}", recipe, signal)?;
} },
}
Unknown { Unknown {
recipe, recipe,
line_number, line_number,
} => { } =>
if let Some(n) = line_number { if let Some(n) = line_number {
write!( write!(
f, f,
@ -301,8 +298,7 @@ impl<'src> Display for RuntimeError<'src> {
)?; )?;
} else { } else {
write!(f, "Recipe `{}` failed for an unknown reason", recipe)?; write!(f, "Recipe `{}` failed for an unknown reason", recipe)?;
} },
}
IoError { recipe, io_error } => { IoError { recipe, io_error } => {
match io_error.kind() { match io_error.kind() {
io::ErrorKind::NotFound => writeln!( io::ErrorKind::NotFound => writeln!(
@ -317,28 +313,27 @@ impl<'src> Display for RuntimeError<'src> {
), ),
_ => writeln!( _ => writeln!(
f, f,
"Recipe `{}` could not be run because of an IO error while \ "Recipe `{}` could not be run because of an IO error while launching `sh`:{}",
launching `sh`:{}",
recipe, io_error recipe, io_error
), ),
}?; }?;
} },
TmpdirIoError { recipe, io_error } => writeln!( TmpdirIoError { recipe, io_error } => writeln!(
f, f,
"Recipe `{}` could not be run because of an IO error while trying \ "Recipe `{}` could not be run because of an IO error while trying to create a temporary \
to create a temporary directory or write a file to that directory`:{}", directory or write a file to that directory`:{}",
recipe, io_error recipe, io_error
)?, )?,
Backtick { output_error, .. } => match output_error { Backtick { output_error, .. } => match output_error {
OutputError::Code(code) => { OutputError::Code(code) => {
writeln!(f, "Backtick failed with exit code {}", code)?; writeln!(f, "Backtick failed with exit code {}", code)?;
} },
OutputError::Signal(signal) => { OutputError::Signal(signal) => {
writeln!(f, "Backtick was terminated by signal {}", signal)?; writeln!(f, "Backtick was terminated by signal {}", signal)?;
} },
OutputError::Unknown => { OutputError::Unknown => {
writeln!(f, "Backtick failed for an unknown reason")?; writeln!(f, "Backtick failed for an unknown reason")?;
} },
OutputError::Io(io_error) => { OutputError::Io(io_error) => {
match io_error.kind() { match io_error.kind() {
io::ErrorKind::NotFound => write!( io::ErrorKind::NotFound => write!(
@ -353,23 +348,22 @@ impl<'src> Display for RuntimeError<'src> {
), ),
_ => write!( _ => write!(
f, f,
"Backtick could not be run because of an IO \ "Backtick could not be run because of an IO error while launching `sh`:\n{}",
error while launching `sh`:\n{}",
io_error io_error
), ),
}?; }?;
} },
OutputError::Utf8(utf8_error) => { OutputError::Utf8(utf8_error) => {
writeln!( writeln!(
f, f,
"Backtick succeeded but stdout was not utf8: {}", "Backtick succeeded but stdout was not utf8: {}",
utf8_error utf8_error
)?; )?;
} },
}, },
NoRecipes => { NoRecipes => {
writeln!(f, "Justfile contains no recipes.",)?; writeln!(f, "Justfile contains no recipes.",)?;
} },
DefaultRecipeRequiresArguments { DefaultRecipeRequiresArguments {
recipe, recipe,
min_arguments, min_arguments,
@ -381,7 +375,7 @@ impl<'src> Display for RuntimeError<'src> {
min_arguments, min_arguments,
Count("argument", *min_arguments), Count("argument", *min_arguments),
)?; )?;
} },
Internal { message } => { Internal { message } => {
write!( write!(
f, f,
@ -389,7 +383,7 @@ impl<'src> Display for RuntimeError<'src> {
consider filing an issue: https://github.com/casey/just/issues/new", consider filing an issue: https://github.com/casey/just/issues/new",
message message
)?; )?;
} },
} }
write!(f, "{}", message.suffix())?; write!(f, "{}", message.suffix())?;


@ -25,7 +25,7 @@ impl Search {
justfile, justfile,
working_directory, working_directory,
}) })
} },
SearchConfig::FromSearchDirectory { search_directory } => { SearchConfig::FromSearchDirectory { search_directory } => {
let search_directory = Self::clean(invocation_directory, search_directory); let search_directory = Self::clean(invocation_directory, search_directory);
@ -38,7 +38,7 @@ impl Search {
justfile, justfile,
working_directory, working_directory,
}) })
} },
SearchConfig::WithJustfile { justfile } => { SearchConfig::WithJustfile { justfile } => {
let justfile = Self::clean(invocation_directory, justfile); let justfile = Self::clean(invocation_directory, justfile);
@ -49,7 +49,7 @@ impl Search {
justfile, justfile,
working_directory, working_directory,
}) })
} },
SearchConfig::WithJustfileAndWorkingDirectory { SearchConfig::WithJustfileAndWorkingDirectory {
justfile, justfile,
@ -75,7 +75,7 @@ impl Search {
justfile, justfile,
working_directory, working_directory,
}) })
} },
SearchConfig::FromSearchDirectory { search_directory } => { SearchConfig::FromSearchDirectory { search_directory } => {
let search_directory = Self::clean(invocation_directory, search_directory); let search_directory = Self::clean(invocation_directory, search_directory);
@ -88,7 +88,7 @@ impl Search {
justfile, justfile,
working_directory, working_directory,
}) })
} },
SearchConfig::WithJustfile { justfile } => { SearchConfig::WithJustfile { justfile } => {
let justfile = Self::clean(invocation_directory, justfile); let justfile = Self::clean(invocation_directory, justfile);
@ -99,7 +99,7 @@ impl Search {
justfile, justfile,
working_directory, working_directory,
}) })
} },
SearchConfig::WithJustfileAndWorkingDirectory { SearchConfig::WithJustfileAndWorkingDirectory {
justfile, justfile,
@ -205,7 +205,7 @@ mod tests {
match Search::justfile(tmp.path()) { match Search::justfile(tmp.path()) {
Err(SearchError::NotFound) => { Err(SearchError::NotFound) => {
assert!(true); assert!(true);
} },
_ => panic!("No justfile found error was expected"), _ => panic!("No justfile found error was expected"),
} }
} }
@ -227,7 +227,7 @@ mod tests {
match Search::justfile(path.as_path()) { match Search::justfile(path.as_path()) {
Err(SearchError::MultipleCandidates { .. }) => { Err(SearchError::MultipleCandidates { .. }) => {
assert!(true); assert!(true);
} },
_ => panic!("Multiple candidates error was expected"), _ => panic!("Multiple candidates error was expected"),
} }
} }
@ -242,7 +242,7 @@ mod tests {
match Search::justfile(path.as_path()) { match Search::justfile(path.as_path()) {
Ok(_path) => { Ok(_path) => {
assert!(true); assert!(true);
} },
_ => panic!("No errors were expected"), _ => panic!("No errors were expected"),
} }
} }
@ -268,7 +268,7 @@ mod tests {
match Search::justfile(path.as_path()) { match Search::justfile(path.as_path()) {
Ok(_path) => { Ok(_path) => {
assert!(true); assert!(true);
} },
_ => panic!("No errors were expected"), _ => panic!("No errors were expected"),
} }
} }
@ -287,7 +287,7 @@ mod tests {
match Search::justfile(path.as_path()) { match Search::justfile(path.as_path()) {
Ok(_path) => { Ok(_path) => {
assert!(true); assert!(true);
} },
_ => panic!("No errors were expected"), _ => panic!("No errors were expected"),
} }
} }
@ -311,7 +311,7 @@ mod tests {
path.pop(); path.pop();
path.push(FILENAME); path.push(FILENAME);
assert_eq!(found_path, path); assert_eq!(found_path, path);
} },
_ => panic!("No errors were expected"), _ => panic!("No errors were expected"),
} }
} }

View File

@ -1,6 +1,7 @@
use crate::common::*; use crate::common::*;
/// String wrapper that uses nonblank characters to display spaces and tabs /// String wrapper that uses nonblank characters to display spaces and
/// tabs
pub struct ShowWhitespace<'str>(pub &'str str); pub struct ShowWhitespace<'str>(pub &'str str);
impl<'str> Display for ShowWhitespace<'str> { impl<'str> Display for ShowWhitespace<'str> {
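The ShowWhitespace wrapper documented above is a small newtype-Display pattern; a self-contained sketch of the idea follows, with arbitrary replacement characters, since the diff only shows the doc comment and not which glyphs just actually substitutes:

  use std::fmt::{self, Display, Formatter};

  struct ShowWs<'a>(&'a str);

  impl<'a> Display for ShowWs<'a> {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
      for c in self.0.chars() {
        match c {
          // Replacement glyphs here are illustrative, not just's choices.
          '\t' => write!(f, "␉")?,
          ' ' => write!(f, "␣")?,
          other => write!(f, "{}", other)?,
        }
      }
      Ok(())
    }
  }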

View File

@ -1,12 +1,12 @@
//! Justfile summary creation, for testing purposes only. //! Justfile summary creation, for testing purposes only.
//! //!
//! The contents of this module are not bound by any stability guarantees. //! The contents of this module are not bound by any stability
//! Breaking changes may be introduced at any time. //! guarantees. Breaking changes may be introduced at any time.
//! //!
//! The main entry point into this module is the `summary` function, which //! The main entry point into this module is the `summary` function,
//! parses a justfile at a given path and produces a `Summary` object, //! which parses a justfile at a given path and produces a `Summary`
//! which broadly captures the functionality of the parsed justfile, or //! object, which broadly captures the functionality of the parsed
//! an error message. //! justfile, or an error message.
//! //!
//! This functionality is intended to be used with `janus`, a tool for //! This functionality is intended to be used with `janus`, a tool for
//! ensuring that changes to just do not inadvertently break or //! ensuring that changes to just do not inadvertently break or
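The doc comment above names `summary` as the module's entry point, taking a justfile path and yielding either a `Summary` or an error message; a hedged usage sketch follows, in which the nested-`Result` shape, the `summary::` module path, and the error channels are assumptions rather than details taken from this diff:

  use std::path::Path;

  fn report(path: &Path) {
    match summary::summary(path) {
      // Assumed shape: outer Err for an I/O failure, inner Err for a
      // compile error message, inner Ok for the parsed Summary.
      Ok(Ok(_summary)) => println!("justfile parsed"),
      Ok(Err(message)) => eprintln!("justfile had problems: {}", message),
      Err(io_error) => eprintln!("could not read justfile: {}", io_error),
    }
  }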

View File

@ -73,8 +73,8 @@ impl<'key, V: Keyed<'key>> Index<&'key str> for Table<'key, V> {
} }
impl<'key, V: Keyed<'key>> IntoIterator for Table<'key, V> { impl<'key, V: Keyed<'key>> IntoIterator for Table<'key, V> {
type Item = (&'key str, V);
type IntoIter = btree_map::IntoIter<&'key str, V>; type IntoIter = btree_map::IntoIter<&'key str, V>;
type Item = (&'key str, V);
fn into_iter(self) -> btree_map::IntoIter<&'key str, V> { fn into_iter(self) -> btree_map::IntoIter<&'key str, V> {
self.map.into_iter() self.map.into_iter()
@ -82,8 +82,8 @@ impl<'key, V: Keyed<'key>> IntoIterator for Table<'key, V> {
} }
impl<'table, V: Keyed<'table> + 'table> IntoIterator for &'table Table<'table, V> { impl<'table, V: Keyed<'table> + 'table> IntoIterator for &'table Table<'table, V> {
type Item = (&'table &'table str, &'table V);
type IntoIter = btree_map::Iter<'table, &'table str, V>; type IntoIter = btree_map::Iter<'table, &'table str, V>;
type Item = (&'table &'table str, &'table V);
#[must_use] #[must_use]
fn into_iter(self) -> btree_map::Iter<'table, &'table str, V> { fn into_iter(self) -> btree_map::Iter<'table, &'table str, V> {

View File

@ -76,7 +76,7 @@ pub(crate) fn analysis_error(
kind, kind,
}; };
assert_eq!(have, want); assert_eq!(have, want);
} },
} }
} }

View File

@ -46,7 +46,7 @@ impl<'src> Thunk<'src> {
args: [a, b], args: [a, b],
name, name,
}) })
} },
_ => Err( _ => Err(
name.error(CompilationErrorKind::FunctionArgumentCountMismatch { name.error(CompilationErrorKind::FunctionArgumentCountMismatch {
function: name.lexeme(), function: name.lexeme(),

View File

@ -63,16 +63,15 @@ impl<'src> Token<'src> {
space_width, space_width,
color.suffix() color.suffix()
)?; )?;
} },
None => { None =>
if self.offset != self.src.len() { if self.offset != self.src.len() {
write!( write!(
f, f,
"internal error: Error has invalid line number: {}", "internal error: Error has invalid line number: {}",
line_number line_number
)? )?
} },
}
} }
Ok(()) Ok(())
} }

View File

@ -31,10 +31,7 @@ pub(crate) enum TokenKind {
impl Display for TokenKind { impl Display for TokenKind {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
use TokenKind::*; use TokenKind::*;
write!( write!(f, "{}", match *self {
f,
"{}",
match *self {
At => "'@'", At => "'@'",
Backtick => "backtick", Backtick => "backtick",
BracketL => "'['", BracketL => "'['",
@ -59,7 +56,6 @@ impl Display for TokenKind {
Text => "command text", Text => "command text",
Whitespace => "whitespace", Whitespace => "whitespace",
Unspecified => "unspecified", Unspecified => "unspecified",
} })
)
} }
} }
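The reshaped `write!` above, where the `match` now rides on the call line as a trailing argument, matches what `overflow_delimited_expr = true` allows; a hedged, self-contained sketch of that output shape with an invented enum:

  use std::fmt::{self, Display, Formatter};

  enum Demo {
    At,
    Backtick,
  }

  impl Display for Demo {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
      // The delimited final argument (the match) overflows onto the
      // call line instead of being indented as a separate argument.
      write!(f, "{}", match self {
        Demo::At => "'@'",
        Demo::Backtick => "backtick",
      })
    }
  }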

View File

@ -2,9 +2,10 @@ use crate::common::*;
use std::mem; use std::mem;
/// Construct a `Tree` from a symbolic expression literal. This macro, and the /// Construct a `Tree` from a symbolic expression literal. This macro,
/// Tree type, are only used in the Parser unit tests, as a concise notation /// and the Tree type, are only used in the Parser unit tests, as a
/// representing the expected results of parsing a given string. /// concise notation representing the expected results of parsing a
/// given string.
macro_rules! tree { macro_rules! tree {
{ {
($($child:tt)*) ($($child:tt)*)
@ -62,19 +63,20 @@ impl<'text> Tree<'text> {
Tree::atom(format!("\"{}\"", contents.as_ref())) Tree::atom(format!("\"{}\"", contents.as_ref()))
} }
/// Push a child node into self, turning it into a List if it was an Atom /// Push a child node into self, turning it into a List if it was an
/// Atom
pub(crate) fn push(self, tree: impl Into<Tree<'text>>) -> Tree<'text> { pub(crate) fn push(self, tree: impl Into<Tree<'text>>) -> Tree<'text> {
match self { match self {
Tree::List(mut children) => { Tree::List(mut children) => {
children.push(tree.into()); children.push(tree.into());
Tree::List(children) Tree::List(children)
} },
Tree::Atom(text) => Tree::List(vec![Tree::Atom(text), tree.into()]), Tree::Atom(text) => Tree::List(vec![Tree::Atom(text), tree.into()]),
} }
} }
/// Extend a self with a tail of Trees, turning self into a List if it /// Extend a self with a tail of Trees, turning self into a List if
/// was an Atom /// it was an Atom
pub(crate) fn extend<I, T>(self, tail: I) -> Tree<'text> pub(crate) fn extend<I, T>(self, tail: I) -> Tree<'text>
where where
I: IntoIterator<Item = T>, I: IntoIterator<Item = T>,
@ -114,7 +116,7 @@ impl Display for Tree<'_> {
} }
write!(f, ")") write!(f, ")")
} },
Tree::Atom(text) => write!(f, "{}", text), Tree::Atom(text) => write!(f, "{}", text),
} }
} }
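The `push` and `extend` doc comments above describe Atom-to-List promotion; a usage sketch follows, assuming `Tree` is in scope and that `Tree::atom` accepts a `&str` (the diff only shows it called with a `String`):

  fn demo() {
    // "justfile" starts as an Atom and becomes a List on the first push;
    // Display should render it roughly as (justfile (recipe foo) a b).
    let tree = Tree::atom("justfile")
      .push(Tree::atom("recipe").push(Tree::atom("foo")))
      .extend(vec![Tree::atom("a"), Tree::atom("b")]);
    println!("{}", tree);
  }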

View File

@ -24,11 +24,11 @@ impl<'expression, 'src> Iterator for Variables<'expression, 'src> {
self.stack.push(lhs); self.stack.push(lhs);
self.stack.push(rhs); self.stack.push(rhs);
self.next() self.next()
} },
Some(Expression::Group { contents }) => { Some(Expression::Group { contents }) => {
self.stack.push(contents); self.stack.push(contents);
self.next() self.next()
} },
} }
} }
} }

View File

@ -32,7 +32,7 @@ impl Display for Warning<'_> {
f, f,
"Please see this issue for more details: https://github.com/casey/just/issues/379" "Please see this issue for more details: https://github.com/casey/just/issues/379"
)?; )?;
} },
} }
write!(f, "{}", message.suffix())?; write!(f, "{}", message.suffix())?;

View File

@ -100,7 +100,7 @@ impl Entry {
for (name, entry) in entries { for (name, entry) in entries {
entry.instantiate(&path.join(name)); entry.instantiate(&path.join(name));
} }
} },
} }
} }
@ -137,7 +137,8 @@ macro_rules! entries {
$($name:tt : $contents:tt,)* $($name:tt : $contents:tt,)*
} => { } => {
{ {
let mut entries: std::collections::HashMap<&'static str, $crate::Entry> = std::collections::HashMap::new(); use std::collections::HashMap;
let mut entries: HashMap<&'static str, $crate::Entry> = HashMap::new();
$( $(
entries.insert($crate::name!($name), $crate::entry!($contents)); entries.insert($crate::name!($name), $crate::entry!($contents));

View File

@ -943,7 +943,11 @@ foo A B C='C':
", ",
args: ("foo", "bar"), args: ("foo", "bar"),
stdout: "", stdout: "",
stderr: "error: Recipe `foo` got 1 argument but takes at least 2\nusage:\n just foo A B C='C'\n", stderr: "
error: Recipe `foo` got 1 argument but takes at least 2
usage:
just foo A B C='C'
",
status: EXIT_FAILURE, status: EXIT_FAILURE,
} }

View File

@ -20,8 +20,8 @@ const DATA: &str = "OK";
const WANT: &str = "shebang: OK\nexpression: OK\ndefault: OK\nlinewise: OK\n"; const WANT: &str = "shebang: OK\nexpression: OK\ndefault: OK\nlinewise: OK\n";
/// Test that just runs with the correct working directory when invoked with /// Test that just runs with the correct working directory when
/// `--justfile` but not `--working-directory` /// invoked with `--justfile` but not `--working-directory`
#[test] #[test]
fn justfile_without_working_directory() -> Result<(), Box<dyn Error>> { fn justfile_without_working_directory() -> Result<(), Box<dyn Error>> {
let tmp = tmptree! { let tmp = tmptree! {
@ -46,9 +46,9 @@ fn justfile_without_working_directory() -> Result<(), Box<dyn Error>> {
Ok(()) Ok(())
} }
/// Test that just runs with the correct working directory when invoked with /// Test that just runs with the correct working directory when
/// `--justfile` but not `--working-directory`, and justfile path has no /// invoked with `--justfile` but not `--working-directory`, and
/// parent /// justfile path has no parent
#[test] #[test]
fn justfile_without_working_directory_relative() -> Result<(), Box<dyn Error>> { fn justfile_without_working_directory_relative() -> Result<(), Box<dyn Error>> {
let tmp = tmptree! { let tmp = tmptree! {
@ -74,7 +74,8 @@ fn justfile_without_working_directory_relative() -> Result<(), Box<dyn Error>> {
Ok(()) Ok(())
} }
/// Test that just invokes commands from the directory in which the justfile is found /// Test that just invokes commands from the directory in which the
/// justfile is found
#[test] #[test]
fn change_working_directory_to_search_justfile_parent() -> Result<(), Box<dyn Error>> { fn change_working_directory_to_search_justfile_parent() -> Result<(), Box<dyn Error>> {
let tmp = tmptree! { let tmp = tmptree! {
@ -99,8 +100,8 @@ fn change_working_directory_to_search_justfile_parent() -> Result<(), Box<dyn Er
Ok(()) Ok(())
} }
/// Test that just runs with the correct working directory when invoked with /// Test that just runs with the correct working directory when
/// `--justfile` but not `--working-directory` /// invoked with `--justfile` but not `--working-directory`
#[test] #[test]
fn justfile_and_working_directory() -> Result<(), Box<dyn Error>> { fn justfile_and_working_directory() -> Result<(), Box<dyn Error>> {
let tmp = tmptree! { let tmp = tmptree! {
@ -129,8 +130,8 @@ fn justfile_and_working_directory() -> Result<(), Box<dyn Error>> {
Ok(()) Ok(())
} }
/// Test that just runs with the correct working directory when invoked with /// Test that just runs with the correct working directory when
/// `--justfile` but not `--working-directory` /// invoked with `--justfile` but not `--working-directory`
#[test] #[test]
fn search_dir_child() -> Result<(), Box<dyn Error>> { fn search_dir_child() -> Result<(), Box<dyn Error>> {
let tmp = tmptree! { let tmp = tmptree! {
@ -157,8 +158,8 @@ fn search_dir_child() -> Result<(), Box<dyn Error>> {
Ok(()) Ok(())
} }
/// Test that just runs with the correct working directory when invoked with /// Test that just runs with the correct working directory when
/// `--justfile` but not `--working-directory` /// invoked with `--justfile` but not `--working-directory`
#[test] #[test]
fn search_dir_parent() -> Result<(), Box<dyn Error>> { fn search_dir_parent() -> Result<(), Box<dyn Error>> {
let tmp = tmptree! { let tmp = tmptree! {