Allow duplicate imports (#2437)

Casey Rodarmor 2024-10-30 15:23:00 -07:00 committed by GitHub
parent 28c4e9a13c
commit 4683a63adc
8 changed files with 85 additions and 44 deletions

README.md

@@ -3319,7 +3319,33 @@ Imports may be made optional by putting a `?` after the `import` keyword:

```mf
import? 'foo/bar.just'
```

Missing source files for optional imports do not produce an error.

Importing the same source file multiple times is not an error<sup>master</sup>.
This allows importing multiple justfiles, for example `foo.just` and
`bar.just`, which both import a third justfile containing shared recipes, for
example `baz.just`, without the duplicate import of `baz.just` being an error:

```mf
# justfile
import 'foo.just'
import 'bar.just'
```

```mf
# foo.just
import 'baz.just'

foo: baz
```

```mf
# bar.just
import 'baz.just'

bar: baz
```

```mf
# baz.just
baz:
```

### Modules<sup>1.19.0</sup>

src/analyzer.rs

@@ -35,6 +35,7 @@ impl<'run, 'src> Analyzer<'run, 'src> {
    root: &Path,
  ) -> CompileResult<'src, Justfile<'src>> {
    let mut definitions = HashMap::new();
    let mut imports = HashSet::new();
    let mut stack = Vec::new();
    let ast = asts.get(root).unwrap();

@@ -54,7 +55,9 @@ impl<'run, 'src> Analyzer<'run, 'src> {
          Item::Comment(_) => (),
          Item::Import { absolute, .. } => {
            if let Some(absolute) = absolute {
              stack.push(asts.get(absolute).unwrap());
              if imports.insert(absolute) {
                stack.push(asts.get(absolute).unwrap());
              }
            }
          }
          Item::Module {
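
The `HashSet` guard above is the core of the change: `HashSet::insert` returns `false` when the value is already present, so each imported file is pushed onto the work stack at most once and a diamond-shaped import graph no longer yields duplicate definitions. Below is a minimal, self-contained sketch of the same pattern, with a toy edge list standing in for the `asts` map; the `reachable_imports` helper and the graph shape are illustrative, not from the codebase:

```rust
use std::collections::HashSet;

// Depth-first walk over an import graph, visiting each file once.
fn reachable_imports<'a>(graph: &[(&'a str, &'a [&'a str])], root: &'a str) -> Vec<&'a str> {
  let mut seen = HashSet::from([root]);
  let mut stack = vec![root];
  let mut order = Vec::new();

  while let Some(file) = stack.pop() {
    order.push(file);
    if let Some((_, imports)) = graph.iter().find(|(name, _)| *name == file) {
      for &import in *imports {
        // `insert` returns false on a repeat, so `baz.just` is queued once
        // even though both `foo.just` and `bar.just` import it.
        if seen.insert(import) {
          stack.push(import);
        }
      }
    }
  }

  order
}

fn main() {
  let graph: [(&str, &[&str]); 4] = [
    ("justfile", &["foo.just", "bar.just"]),
    ("foo.just", &["baz.just"]),
    ("bar.just", &["baz.just"]),
    ("baz.just", &[]),
  ];

  // Prints each file exactly once, e.g.:
  // ["justfile", "bar.just", "baz.just", "foo.just"]
  println!("{:?}", reachable_imports(&graph, "justfile"));
}
```

Running it prints each file exactly once, with `baz.just` reached through whichever of its importers happens to be visited first.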

src/compiler.rs

@@ -21,7 +21,6 @@ impl Compiler {
      let tokens = Lexer::lex(relative, src)?;
      let mut ast = Parser::parse(
        current.file_depth,
        &current.path,
        &current.import_offsets,
        &current.namepath,
        &tokens,

@@ -214,14 +213,7 @@ impl Compiler {
  #[cfg(test)]
  pub(crate) fn test_compile(src: &str) -> CompileResult<Justfile> {
    let tokens = Lexer::test_lex(src)?;
    let ast = Parser::parse(
      0,
      &PathBuf::new(),
      &[],
      &Namepath::default(),
      &tokens,
      &PathBuf::new(),
    )?;
    let ast = Parser::parse(0, &[], &Namepath::default(), &tokens, &PathBuf::new())?;
    let root = PathBuf::from("justfile");
    let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
    asts.insert(root.clone(), ast);

src/parser.rs

@@ -26,7 +26,6 @@ use {super::*, TokenKind::*};
pub(crate) struct Parser<'run, 'src> {
  expected_tokens: BTreeSet<TokenKind>,
  file_depth: u32,
  file_path: &'run Path,
  import_offsets: Vec<usize>,
  module_namepath: &'run Namepath<'src>,
  next_token: usize,

@@ -39,7 +38,6 @@ impl<'run, 'src> Parser<'run, 'src> {
  /// Parse `tokens` into an `Ast`
  pub(crate) fn parse(
    file_depth: u32,
    file_path: &'run Path,
    import_offsets: &[usize],
    module_namepath: &'run Namepath<'src>,
    tokens: &'run [Token<'src>],

@@ -48,7 +46,6 @@ impl<'run, 'src> Parser<'run, 'src> {
    Self {
      expected_tokens: BTreeSet::new(),
      file_depth,
      file_path,
      import_offsets: import_offsets.to_vec(),
      module_namepath,
      next_token: 0,

@@ -910,7 +907,6 @@ impl<'run, 'src> Parser<'run, 'src> {
        dependencies,
        doc,
        file_depth: self.file_depth,
        file_path: self.file_path.into(),
        import_offsets: self.import_offsets.clone(),
        name,
        namepath: self.module_namepath.join(name),

@@ -1162,15 +1158,8 @@ mod tests {
  fn test(text: &str, want: Tree) {
    let unindented = unindent(text);
    let tokens = Lexer::test_lex(&unindented).expect("lexing failed");
    let justfile = Parser::parse(
      0,
      &PathBuf::new(),
      &[],
      &Namepath::default(),
      &tokens,
      &PathBuf::new(),
    )
    .expect("parsing failed");
    let justfile = Parser::parse(0, &[], &Namepath::default(), &tokens, &PathBuf::new())
      .expect("parsing failed");
    let have = justfile.tree();
    if have != want {
      println!("parsed text: {unindented}");

@@ -1208,14 +1197,7 @@ mod tests {
  ) {
    let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");

    match Parser::parse(
      0,
      &PathBuf::new(),
      &[],
      &Namepath::default(),
      &tokens,
      &PathBuf::new(),
    ) {
    match Parser::parse(0, &[], &Namepath::default(), &tokens, &PathBuf::new()) {
      Ok(_) => panic!("Parsing unexpectedly succeeded"),
      Err(have) => {
        let want = CompileError {

src/recipe.rs

@@ -26,8 +26,6 @@ pub(crate) struct Recipe<'src, D = Dependency<'src>> {
  #[serde(skip)]
  pub(crate) file_depth: u32,
  #[serde(skip)]
  pub(crate) file_path: PathBuf,
  #[serde(skip)]
  pub(crate) import_offsets: Vec<usize>,
  pub(crate) name: Name<'src>,
  pub(crate) namepath: Namepath<'src>,
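
A side note on the surviving attributes: `#[serde(skip)]` keeps these bookkeeping fields out of serialized recipes (for example in `just --dump --dump-format json` output). Here is a minimal sketch of the effect, assuming `serde` with the `derive` feature and `serde_json` as dependencies; the two-field struct is a stand-in rather than just's actual `Recipe`:

```rust
use serde::Serialize;

#[derive(Serialize)]
struct Recipe {
  name: String,
  #[serde(skip)]
  file_depth: u32, // internal bookkeeping, never serialized
}

fn main() {
  let recipe = Recipe {
    name: "baz".into(),
    file_depth: 1,
  };

  // Prints {"name":"baz"}; file_depth is absent from the output.
  println!("{}", serde_json::to_string(&recipe).unwrap());
}
```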

src/testing.rs

@@ -59,15 +59,8 @@ pub(crate) fn analysis_error(
) {
  let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");

  let ast = Parser::parse(
    0,
    &PathBuf::new(),
    &[],
    &Namepath::default(),
    &tokens,
    &PathBuf::new(),
  )
  .expect("Parsing failed in analysis test...");
  let ast = Parser::parse(0, &[], &Namepath::default(), &tokens, &PathBuf::new())
    .expect("Parsing failed in analysis test...");

  let root = PathBuf::from("justfile");
  let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
src/unresolved_recipe.rs

@@ -50,7 +50,6 @@ impl<'src> UnresolvedRecipe<'src> {
      dependencies,
      doc: self.doc,
      file_depth: self.file_depth,
      file_path: self.file_path,
      import_offsets: self.import_offsets,
      name: self.name,
      namepath: self.namepath,

tests/imports.rs

@@ -360,3 +360,51 @@ fn reused_import_are_allowed() {
    })
    .run();
}

#[test]
fn multiply_imported_items_do_not_conflict() {
  Test::new()
    .justfile(
      "
        import 'a.just'
        import 'a.just'
        foo: bar
      ",
    )
    .write(
      "a.just",
      "
        x := 'y'

        @bar:
          echo hello
      ",
    )
    .stdout("hello\n")
    .run();
}

#[test]
fn nested_multiply_imported_items_do_not_conflict() {
  Test::new()
    .justfile(
      "
        import 'a.just'
        import 'b.just'
        foo: bar
      ",
    )
    .write("a.just", "import 'c.just'")
    .write("b.just", "import 'c.just'")
    .write(
      "c.just",
      "
        x := 'y'

        @bar:
          echo hello
      ",
    )
    .stdout("hello\n")
    .run();
}