Mirror of https://github.com/casey/just.git (synced 2025-08-04 15:08:39 +00:00)
Guarantee that Namepaths are non-empty (#2638)
commit c8567ddc1c
parent d0372d1032
5 changed files with 25 additions and 13 deletions
src/compiler.rs

@@ -22,7 +22,7 @@ impl Compiler {
       let mut ast = Parser::parse(
         current.file_depth,
         &current.import_offsets,
-        &current.namepath,
+        current.namepath.as_ref(),
         &tokens,
         &current.working_directory,
       )?;
@@ -213,7 +213,7 @@ impl Compiler {
   #[cfg(test)]
   pub(crate) fn test_compile(src: &str) -> CompileResult<Justfile> {
     let tokens = Lexer::test_lex(src)?;
-    let ast = Parser::parse(0, &[], &Namepath::default(), &tokens, &PathBuf::new())?;
+    let ast = Parser::parse(0, &[], None, &tokens, &PathBuf::new())?;
     let root = PathBuf::from("justfile");
     let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
     asts.insert(root.clone(), ast);
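The `as_ref()` call above is needed because `Source::namepath` becomes an `Option<Namepath>` in this commit (see the `Source` change below), while `Parser::parse` now takes `Option<&Namepath>`. A minimal standalone sketch of that conversion, using `String` in place of the crate's `Namepath`:

// Illustrative sketch only, not the crate's code: Option::as_ref borrows the
// contents of an Option, turning an owned Option<T> into an Option<&T>.
fn main() {
    let owned: Option<String> = Some("root".to_string());
    let borrowed: Option<&String> = owned.as_ref(); // Option<T> -> Option<&T>
    assert_eq!(borrowed.map(String::as_str), Some("root"));
}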
src/namepath.rs

@@ -1,6 +1,6 @@
 use super::*;

-#[derive(Default, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
+#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
 pub(crate) struct Namepath<'src>(Vec<Name<'src>>);

 impl<'src> Namepath<'src> {
@@ -28,6 +28,12 @@ impl Display for Namepath<'_> {
   }
 }

+impl<'src> From<Name<'src>> for Namepath<'src> {
+  fn from(name: Name<'src>) -> Self {
+    Self(vec![name])
+  }
+}
+
 impl Serialize for Namepath<'_> {
   fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
   where
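Dropping `Default` and adding `From<Name>` is what enforces the non-emptiness in the commit title: a `Namepath` can now only be built from a single `Name` or by joining onto an existing path. A minimal standalone sketch of that invariant, with simplified stand-in types rather than the crate's `Name<'src>` and its real `join` implementation:

// Stand-in types for illustration; only the construction pattern mirrors the diff.
#[derive(Clone, Debug, PartialEq)]
struct Name(&'static str);

#[derive(Clone, Debug, PartialEq)]
struct Namepath(Vec<Name>);

impl From<Name> for Namepath {
    // Mirrors the new impl above: a path built from a name holds exactly one element.
    fn from(name: Name) -> Self {
        Self(vec![name])
    }
}

impl Namepath {
    // Simplified stand-in for Namepath::join: appending keeps the path non-empty.
    fn join(&self, name: Name) -> Self {
        let mut names = self.0.clone();
        names.push(name);
        Self(names)
    }
}

fn main() {
    let root = Namepath::from(Name("foo"));
    let child = root.join(Name("bar"));
    assert_eq!(child, Namepath(vec![Name("foo"), Name("bar")]));
    // With Default removed, there is no constructor that yields an empty path.
}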
src/parser.rs

@@ -27,7 +27,7 @@ pub(crate) struct Parser<'run, 'src> {
   expected_tokens: BTreeSet<TokenKind>,
   file_depth: u32,
   import_offsets: Vec<usize>,
-  module_namepath: &'run Namepath<'src>,
+  module_namepath: Option<&'run Namepath<'src>>,
   next_token: usize,
   recursion_depth: usize,
   tokens: &'run [Token<'src>],
@@ -40,7 +40,7 @@ impl<'run, 'src> Parser<'run, 'src> {
   pub(crate) fn parse(
     file_depth: u32,
     import_offsets: &[usize],
-    module_namepath: &'run Namepath<'src>,
+    module_namepath: Option<&'run Namepath<'src>>,
     tokens: &'run [Token<'src>],
     working_directory: &'run Path,
   ) -> CompileResult<'src, Ast<'src>> {
@@ -996,7 +996,9 @@ impl<'run, 'src> Parser<'run, 'src> {
       file_depth: self.file_depth,
       import_offsets: self.import_offsets.clone(),
       name,
-      namepath: self.module_namepath.join(name),
+      namepath: self
+        .module_namepath
+        .map_or_else(|| name.into(), |module_namepath| module_namepath.join(name)),
       parameters: positional.into_iter().chain(variadic).collect(),
       priors,
       private,
@@ -1255,8 +1257,7 @@ mod tests {
   fn test(text: &str, want: Tree) {
     let unindented = unindent(text);
     let tokens = Lexer::test_lex(&unindented).expect("lexing failed");
-    let justfile = Parser::parse(0, &[], &Namepath::default(), &tokens, &PathBuf::new())
-      .expect("parsing failed");
+    let justfile = Parser::parse(0, &[], None, &tokens, &PathBuf::new()).expect("parsing failed");
     let have = justfile.tree();
     if have != want {
       println!("parsed text: {unindented}");
@@ -1294,7 +1295,7 @@ mod tests {
   ) {
     let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");

-    match Parser::parse(0, &[], &Namepath::default(), &tokens, &PathBuf::new()) {
+    match Parser::parse(0, &[], None, &tokens, &PathBuf::new()) {
       Ok(_) => panic!("Parsing unexpectedly succeeded"),
       Err(have) => {
         let want = CompileError {
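The `map_or_else` in the recipe hunk above picks the recipe's namepath: with no module namepath (the root module), the recipe's `Name` alone becomes the path via the new `From<Name>` impl; inside a module, the name is joined onto the module's path. A rough sketch of the same branching, with `Vec<String>` standing in for `Namepath`:

// Sketch only; the real code operates on Name/Namepath rather than strings.
fn recipe_namepath(module_namepath: Option<&Vec<String>>, name: &str) -> Vec<String> {
    module_namepath.map_or_else(
        || vec![name.to_string()], // root module: the recipe name alone is the path
        |path| {
            let mut joined = path.clone(); // submodule: extend the module's path
            joined.push(name.to_string());
            joined
        },
    )
}

fn main() {
    assert_eq!(recipe_namepath(None, "build"), vec!["build".to_string()]);

    let module = vec!["ci".to_string()];
    assert_eq!(
        recipe_namepath(Some(&module), "build"),
        vec!["ci".to_string(), "build".to_string()]
    );
}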
src/source.rs

@@ -5,7 +5,7 @@ pub(crate) struct Source<'src> {
   pub(crate) file_depth: u32,
   pub(crate) file_path: Vec<PathBuf>,
   pub(crate) import_offsets: Vec<usize>,
-  pub(crate) namepath: Namepath<'src>,
+  pub(crate) namepath: Option<Namepath<'src>>,
   pub(crate) path: PathBuf,
   pub(crate) working_directory: PathBuf,
 }
@@ -16,7 +16,7 @@ impl<'src> Source<'src> {
       file_depth: 0,
       file_path: vec![path.into()],
       import_offsets: Vec::new(),
-      namepath: Namepath::default(),
+      namepath: None,
       path: path.into(),
       working_directory: path.parent().unwrap().into(),
     }
@@ -53,7 +53,12 @@ impl<'src> Source<'src> {
         .chain(iter::once(path.clone()))
         .collect(),
       import_offsets: Vec::new(),
-      namepath: self.namepath.join(name),
+      namepath: Some(
+        self
+          .namepath
+          .as_ref()
+          .map_or_else(|| name.into(), |namepath| namepath.join(name)),
+      ),
       path: path.clone(),
       working_directory: path.parent().unwrap().into(),
     }
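Making `Source::namepath` an `Option` encodes the root-vs-module distinction directly: the root justfile carries no namepath at all, and every module source carries a non-empty one. A small sketch of what that buys downstream, using a hypothetical `module_depth` helper (not part of the crate) and `Vec<String>` in place of `Namepath<'src>`:

// Hypothetical helper for illustration: "is this the root?" and "how deep is
// this module?" become direct questions on the field, with no empty-vector sentinel.
fn module_depth(namepath: Option<&Vec<String>>) -> usize {
    // None is the root; Some always holds at least one name.
    namepath.map_or(0, |path| path.len())
}

fn main() {
    let root: Option<Vec<String>> = None;
    let module = Some(vec!["ci".to_string(), "lint".to_string()]);

    assert_eq!(module_depth(root.as_ref()), 0);
    assert_eq!(module_depth(module.as_ref()), 2);
}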
src/testing.rs

@@ -59,7 +59,7 @@ pub(crate) fn analysis_error(
 ) {
   let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");

-  let ast = Parser::parse(0, &[], &Namepath::default(), &tokens, &PathBuf::new())
+  let ast = Parser::parse(0, &[], None, &tokens, &PathBuf::new())
     .expect("Parsing failed in analysis test...");

   let root = PathBuf::from("justfile");