diff --git a/src/analyzer.rs b/src/analyzer.rs index 99c3db2..d6f3aed 100644 --- a/src/analyzer.rs +++ b/src/analyzer.rs @@ -111,7 +111,12 @@ impl<'src> Analyzer<'src> { for recipe in recipes { define(recipe.name, "recipe", settings.allow_duplicate_recipes)?; - recipe_table.insert(recipe.clone()); + if recipe_table + .get(recipe.name.lexeme()) + .map_or(true, |original| recipe.depth <= original.depth) + { + recipe_table.insert(recipe.clone()); + } } let recipes = RecipeResolver::resolve_recipes(recipe_table, &self.assignments)?; diff --git a/src/compiler.rs b/src/compiler.rs index 82730bd..6680a84 100644 --- a/src/compiler.rs +++ b/src/compiler.rs @@ -13,14 +13,14 @@ impl Compiler { let mut srcs: HashMap<PathBuf, &str> = HashMap::new(); let mut loaded = Vec::new(); - let mut stack: Vec<PathBuf> = Vec::new(); - stack.push(root.into()); + let mut stack: Vec<(PathBuf, u32)> = Vec::new(); + stack.push((root.into(), 0)); - while let Some(current) = stack.pop() { + while let Some((current, depth)) = stack.pop() { let (relative, src) = loader.load(root, &current)?; loaded.push(relative.into()); let tokens = Lexer::lex(relative, src)?; - let mut ast = Parser::parse(current != root, &current, &tokens)?; + let mut ast = Parser::parse(depth, &current, &tokens)?; paths.insert(current.clone(), relative.into()); srcs.insert(current.clone(), src); @@ -50,7 +50,7 @@ impl Compiler { return Err(Error::CircularImport { current, import }); } *absolute = Some(import.clone()); - stack.push(import); + stack.push((import, depth + 1)); } Item::Import { relative, absolute } => { let import = current.parent().unwrap().join(&relative.cooked).lexiclean(); @@ -58,7 +58,7 @@ impl Compiler { return Err(Error::CircularImport { current, import }); } *absolute = Some(import.clone()); - stack.push(import); + stack.push((import, depth + 1)); } _ => {} } @@ -120,7 +120,7 @@ impl Compiler { #[cfg(test)] pub(crate) fn test_compile(src: &str) -> CompileResult<Justfile> { let tokens = Lexer::test_lex(src)?; - let ast = Parser::parse(false, &PathBuf::new(), 
&tokens)?; + let ast = Parser::parse(0, &PathBuf::new(), &tokens)?; let root = PathBuf::from("justfile"); let mut asts: HashMap<PathBuf, Ast> = HashMap::new(); asts.insert(root.clone(), ast); diff --git a/src/parser.rs b/src/parser.rs index cb254f1..b8ca0cc 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -34,14 +34,14 @@ pub(crate) struct Parser<'tokens, 'src> { depth: usize, /// Path to the file being parsed path: PathBuf, - /// Parsing a submodule - submodule: bool, + /// Depth of submodule being parsed + submodule: u32, } impl<'tokens, 'src> Parser<'tokens, 'src> { /// Parse `tokens` into an `Ast` pub(crate) fn parse( - submodule: bool, + submodule: u32, path: &Path, tokens: &'tokens [Token<'src>], ) -> CompileResult<'src, Ast<'src>> { @@ -724,7 +724,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> { priors, private: name.lexeme().starts_with('_'), quiet, - submodule: self.submodule, + depth: self.submodule, }) } @@ -942,7 +942,7 @@ mod tests { fn test(text: &str, want: Tree) { let unindented = unindent(text); let tokens = Lexer::test_lex(&unindented).expect("lexing failed"); - let justfile = Parser::parse(false, &PathBuf::new(), &tokens).expect("parsing failed"); + let justfile = Parser::parse(0, &PathBuf::new(), &tokens).expect("parsing failed"); let have = justfile.tree(); if have != want { println!("parsed text: {unindented}"); @@ -980,7 +980,7 @@ mod tests { ) { let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test..."); - match Parser::parse(false, &PathBuf::new(), &tokens) { + match Parser::parse(0, &PathBuf::new(), &tokens) { Ok(_) => panic!("Parsing unexpectedly succeeded"), Err(have) => { let want = CompileError { diff --git a/src/recipe.rs b/src/recipe.rs index 598e6a1..6f03927 100644 --- a/src/recipe.rs +++ b/src/recipe.rs @@ -35,7 +35,7 @@ pub(crate) struct Recipe<'src, D = Dependency<'src>> { pub(crate) quiet: bool, pub(crate) shebang: bool, #[serde(skip)] - pub(crate) submodule: bool, + pub(crate) depth: u32, } impl<'src, D> Recipe<'src, D> { @@ 
-226,7 +226,7 @@ impl<'src, D> Recipe<'src, D> { let mut cmd = context.settings.shell_command(config); if self.change_directory() { - cmd.current_dir(if self.submodule { + cmd.current_dir(if self.depth > 0 { self.path.parent().unwrap() } else { &context.search.working_directory @@ -366,7 +366,7 @@ impl<'src, D> Recipe<'src, D> { let mut command = Platform::make_shebang_command( &path, if self.change_directory() { - if self.submodule { + if self.depth > 0 { Some(self.path.parent().unwrap()) } else { Some(&context.search.working_directory) diff --git a/src/testing.rs b/src/testing.rs index adb6243..e97883f 100644 --- a/src/testing.rs +++ b/src/testing.rs @@ -59,8 +59,7 @@ pub(crate) fn analysis_error( ) { let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test..."); - let ast = - Parser::parse(false, &PathBuf::new(), &tokens).expect("Parsing failed in analysis test..."); + let ast = Parser::parse(0, &PathBuf::new(), &tokens).expect("Parsing failed in analysis test..."); let root = PathBuf::from("justfile"); let mut asts: HashMap<PathBuf, Ast> = HashMap::new(); asts.insert(root.clone(), ast); diff --git a/src/unresolved_recipe.rs b/src/unresolved_recipe.rs index b7f379f..c545394 100644 --- a/src/unresolved_recipe.rs +++ b/src/unresolved_recipe.rs @@ -48,6 +48,7 @@ impl<'src> UnresolvedRecipe<'src> { attributes: self.attributes, body: self.body, dependencies, + depth: self.depth, doc: self.doc, name: self.name, parameters: self.parameters, @@ -56,7 +57,6 @@ impl<'src> UnresolvedRecipe<'src> { private: self.private, quiet: self.quiet, shebang: self.shebang, - submodule: self.submodule, }) } } diff --git a/tests/imports.rs b/tests/imports.rs index 2c2c913..b1cb395 100644 --- a/tests/imports.rs +++ b/tests/imports.rs @@ -127,3 +127,28 @@ fn include_error() { ) .run(); } + +#[test] +fn recipes_in_import_are_overridden_by_recipes_in_parent() { + Test::new() + .tree(tree! 
{ + "import.justfile": " + a: + @echo IMPORT + ", + }) + .justfile( + " + import './import.justfile' + + set allow-duplicate-recipes + + a: + @echo ROOT + ", + ) + .test_round_trip(false) + .arg("a") + .stdout("ROOT\n") + .run(); +}