Override imported recipes (#1790)

This commit is contained in:
Casey Rodarmor 2023-12-28 17:34:37 -08:00 committed by GitHub
parent 85b5a92e69
commit 8ea278c58b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 49 additions and 20 deletions

View File

@@ -111,8 +111,13 @@ impl<'src> Analyzer<'src> {
     for recipe in recipes {
       define(recipe.name, "recipe", settings.allow_duplicate_recipes)?;
+      if recipe_table
+        .get(recipe.name.lexeme())
+        .map_or(true, |original| recipe.depth <= original.depth)
+      {
         recipe_table.insert(recipe.clone());
+      }
     }
     let recipes = RecipeResolver::resolve_recipes(recipe_table, &self.assignments)?;

View File

@@ -13,14 +13,14 @@ impl Compiler {
     let mut srcs: HashMap<PathBuf, &str> = HashMap::new();
     let mut loaded = Vec::new();
-    let mut stack: Vec<PathBuf> = Vec::new();
-    stack.push(root.into());
-    while let Some(current) = stack.pop() {
+    let mut stack: Vec<(PathBuf, u32)> = Vec::new();
+    stack.push((root.into(), 0));
+    while let Some((current, depth)) = stack.pop() {
       let (relative, src) = loader.load(root, &current)?;
       loaded.push(relative.into());
       let tokens = Lexer::lex(relative, src)?;
-      let mut ast = Parser::parse(current != root, &current, &tokens)?;
+      let mut ast = Parser::parse(depth, &current, &tokens)?;
       paths.insert(current.clone(), relative.into());
       srcs.insert(current.clone(), src);
@@ -50,7 +50,7 @@ impl Compiler {
           return Err(Error::CircularImport { current, import });
         }
         *absolute = Some(import.clone());
-        stack.push(import);
+        stack.push((import, depth + 1));
       }
       Item::Import { relative, absolute } => {
         let import = current.parent().unwrap().join(&relative.cooked).lexiclean();
@@ -58,7 +58,7 @@ impl Compiler {
           return Err(Error::CircularImport { current, import });
         }
         *absolute = Some(import.clone());
-        stack.push(import);
+        stack.push((import, depth + 1));
       }
       _ => {}
     }
@@ -120,7 +120,7 @@ impl Compiler {
   #[cfg(test)]
   pub(crate) fn test_compile(src: &str) -> CompileResult<Justfile> {
     let tokens = Lexer::test_lex(src)?;
-    let ast = Parser::parse(false, &PathBuf::new(), &tokens)?;
+    let ast = Parser::parse(0, &PathBuf::new(), &tokens)?;
     let root = PathBuf::from("justfile");
     let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
     asts.insert(root.clone(), ast);

View File

@@ -34,14 +34,14 @@ pub(crate) struct Parser<'tokens, 'src> {
   depth: usize,
   /// Path to the file being parsed
   path: PathBuf,
-  /// Parsing a submodule
-  submodule: bool,
+  /// Depth of submodule being parsed
+  submodule: u32,
 }

 impl<'tokens, 'src> Parser<'tokens, 'src> {
   /// Parse `tokens` into an `Ast`
   pub(crate) fn parse(
-    submodule: bool,
+    submodule: u32,
     path: &Path,
     tokens: &'tokens [Token<'src>],
   ) -> CompileResult<'src, Ast<'src>> {
@@ -724,7 +724,7 @@ impl<'tokens, 'src> Parser<'tokens, 'src> {
       priors,
       private: name.lexeme().starts_with('_'),
       quiet,
-      submodule: self.submodule,
+      depth: self.submodule,
     })
   }
@@ -942,7 +942,7 @@ mod tests {
   fn test(text: &str, want: Tree) {
     let unindented = unindent(text);
     let tokens = Lexer::test_lex(&unindented).expect("lexing failed");
-    let justfile = Parser::parse(false, &PathBuf::new(), &tokens).expect("parsing failed");
+    let justfile = Parser::parse(0, &PathBuf::new(), &tokens).expect("parsing failed");
     let have = justfile.tree();
     if have != want {
       println!("parsed text: {unindented}");
@@ -980,7 +980,7 @@ mod tests {
   ) {
     let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");
-    match Parser::parse(false, &PathBuf::new(), &tokens) {
+    match Parser::parse(0, &PathBuf::new(), &tokens) {
       Ok(_) => panic!("Parsing unexpectedly succeeded"),
       Err(have) => {
         let want = CompileError {

View File

@@ -35,7 +35,7 @@ pub(crate) struct Recipe<'src, D = Dependency<'src>> {
   pub(crate) quiet: bool,
   pub(crate) shebang: bool,
   #[serde(skip)]
-  pub(crate) submodule: bool,
+  pub(crate) depth: u32,
 }

 impl<'src, D> Recipe<'src, D> {
@@ -226,7 +226,7 @@ impl<'src, D> Recipe<'src, D> {
     let mut cmd = context.settings.shell_command(config);
     if self.change_directory() {
-      cmd.current_dir(if self.submodule {
+      cmd.current_dir(if self.depth > 0 {
         self.path.parent().unwrap()
       } else {
         &context.search.working_directory
@@ -366,7 +366,7 @@ impl<'src, D> Recipe<'src, D> {
     let mut command = Platform::make_shebang_command(
       &path,
       if self.change_directory() {
-        if self.submodule {
+        if self.depth > 0 {
           Some(self.path.parent().unwrap())
         } else {
           Some(&context.search.working_directory)

View File

@@ -59,8 +59,7 @@ pub(crate) fn analysis_error(
 ) {
   let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");
-  let ast =
-    Parser::parse(false, &PathBuf::new(), &tokens).expect("Parsing failed in analysis test...");
+  let ast = Parser::parse(0, &PathBuf::new(), &tokens).expect("Parsing failed in analysis test...");
   let root = PathBuf::from("justfile");
   let mut asts: HashMap<PathBuf, Ast> = HashMap::new();

View File

@@ -48,6 +48,7 @@ impl<'src> UnresolvedRecipe<'src> {
       attributes: self.attributes,
       body: self.body,
       dependencies,
+      depth: self.depth,
       doc: self.doc,
       name: self.name,
       parameters: self.parameters,
@@ -56,7 +57,6 @@ impl<'src> UnresolvedRecipe<'src> {
       private: self.private,
       quiet: self.quiet,
       shebang: self.shebang,
-      submodule: self.submodule,
     })
   }
 }

View File

@@ -127,3 +127,28 @@ fn include_error() {
     )
     .run();
 }
+
+#[test]
+fn recipes_in_import_are_overridden_by_recipes_in_parent() {
+  Test::new()
+    .tree(tree! {
+      "import.justfile": "
+        a:
+          @echo IMPORT
+      ",
+    })
+    .justfile(
+      "
+        import './import.justfile'
+
+        set allow-duplicate-recipes
+
+        a:
+          @echo ROOT
+      ",
+    )
+    .test_round_trip(false)
+    .arg("a")
+    .stdout("ROOT\n")
+    .run();
+}