add pedantic clippy setting and fix/allow warnings (#147)

Josh Thomas 2025-05-14 18:21:43 -05:00 committed by GitHub
parent e87c917cb6
commit d677aacf7c
24 changed files with 180 additions and 113 deletions

@@ -26,12 +26,6 @@ jobs:
         with:
           persist-credentials: false
-      - name: Install nightly toolchain for rustfmt
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b
-        with:
-          toolchain: nightly
-          components: rustfmt
       - name: Install uv
        uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
        with:
@@ -49,3 +43,44 @@ jobs:
            --all-files \
            --show-diff-on-failure \
            --color always
+
+  rustfmt:
+    runs-on: ubuntu-24.04
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+      - uses: actions-rust-lang/setup-rust-toolchain@9d7e65c320fdb52dcd45ffaa68deb6c02c8754d9
+        with:
+          toolchain: nightly
+          components: rustfmt
+      - name: Run rustfmt
+        run: cargo +nightly fmt --all -- --check
+
+  clippy:
+    runs-on: ubuntu-24.04
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+      - uses: actions-rust-lang/setup-rust-toolchain@9d7e65c320fdb52dcd45ffaa68deb6c02c8754d9
+        with:
+          components: clippy
+      - name: Run clippy
+        run: cargo clippy --all-targets --all-features -- -D warnings
+
+  cargo-check:
+    runs-on: ubuntu-24.04
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+      - uses: actions-rust-lang/setup-rust-toolchain@9d7e65c320fdb52dcd45ffaa68deb6c02c8754d9
+      - name: Run cargo check
+        run: cargo check --all-targets --all-features

@@ -1,6 +1,3 @@
-default_language_version:
-  rust: "1.86"
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v5.0.0
@@ -21,30 +18,3 @@ repos:
     rev: v1.6.0
     hooks:
       - id: zizmor
-  - repo: local
-    hooks:
-      - id: fmt
-        name: cargo fmt
-        description: format files with "cargo fmt"
-        types:
-          - rust
-        language: rust
-        entry: cargo +nightly fmt
-        args:
-          - --
-      - id: check
-        name: cargo check
-        description: check the package for errors with "cargo check"
-        types:
-          - rust
-        language: rust
-        entry: cargo check
-        pass_filenames: false
-      - id: clippy
-        name: cargo clippy
-        description: check the package with "cargo clippy"
-        types:
-          - rust
-        language: rust
-        entry: cargo clippy
-        pass_filenames: false

@@ -23,6 +23,10 @@ thiserror = "2.0"
 tokio = { version = "1.42", features = ["full"] }
 tower-lsp-server = { version = "0.21", features = ["proposed"] }
 
+[workspace.lints.clippy]
+pedantic = { level = "warn", priority = -1 }
+missing_errors_doc = "allow"
+
 [profile.dev.package]
 insta.opt-level = 3
 similar.opt-level = 3
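
Note: the hunk above relies on Cargo's workspace lint inheritance. The lint table is declared once at the workspace root, and each member crate opts in with `[lints] workspace = true`, which is exactly what the later Cargo.toml hunks add. A minimal sketch of the pattern; the member path below is illustrative, not taken from this repository:

# workspace-root Cargo.toml
[workspace]
members = ["crates/example"]                  # hypothetical member

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -1 }  # enable the whole group at lower priority
missing_errors_doc = "allow"                  # per-lint settings override the group

# crates/example/Cargo.toml
[lints]
workspace = true                              # inherit the workspace lint table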

@@ -14,3 +14,6 @@ toml = "0.8"
 
 [dev-dependencies]
 tempfile = "3.19"
+
+[lints]
+workspace = true

@@ -79,10 +79,12 @@ impl Settings {
         Ok(settings)
     }
 
+    #[must_use]
     pub fn debug(&self) -> bool {
         self.debug
     }
 
+    #[must_use]
     pub fn venv_path(&self) -> Option<&str> {
         self.venv_path.as_deref()
     }

@@ -6,3 +6,6 @@ publish = false
 
 [dependencies]
 pyo3-build-config = { workspace = true, features = ["resolve-config"] }
+
+[lints]
+workspace = true

@@ -31,7 +31,7 @@ pub fn setup_python_linking() {
     // Only link libpython explicitly if we are NOT building an extension module.
     if !is_extension_module {
         if let Some(lib_name) = &config.lib_name {
-            println!("cargo:rustc-link-lib=dylib={}", lib_name);
+            println!("cargo:rustc-link-lib=dylib={lib_name}");
         } else {
             // Warn only if linking is actually needed but we can't find the lib name
             println!("cargo:warning=Python library name not found in config (needed for non-extension module
@@ -43,9 +43,9 @@ builds).");
     // These are needed for test executables and potential future standalone binaries,
     // and generally harmless for extension modules.
     if let Some(lib_dir) = &config.lib_dir {
-        println!("cargo:rustc-link-search=native={}", lib_dir);
+        println!("cargo:rustc-link-search=native={lib_dir}");
         #[cfg(not(windows))]
-        println!("cargo:rustc-link-arg=-Wl,-rpath,{}", lib_dir);
+        println!("cargo:rustc-link-arg=-Wl,-rpath,{lib_dir}");
     } else {
         // Warn only if linking is actually needed but we can't find the lib dir
         if !is_extension_module {

@@ -19,3 +19,6 @@ djls-dev = { workspace = true }
 
 [dev-dependencies]
 tempfile = { workspace = true }
+
+[lints]
+workspace = true

@@ -23,6 +23,7 @@ pub struct DjangoProject {
 }
 
 impl DjangoProject {
+    #[must_use]
     pub fn new(path: PathBuf) -> Self {
         Self {
             path,
@@ -64,17 +65,19 @@ impl DjangoProject {
                     Ok(())
                 }
                 Err(e) => {
-                    eprintln!("Failed to import Django: {}", e);
+                    eprintln!("Failed to import Django: {e}");
                     Err(e)
                 }
             }
         })
     }
 
+    #[must_use]
     pub fn template_tags(&self) -> Option<&TemplateTags> {
         self.template_tags.as_ref()
     }
 
+    #[must_use]
     pub fn path(&self) -> &Path {
         &self.path
     }
@@ -84,7 +87,7 @@ impl fmt::Display for DjangoProject {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         writeln!(f, "Project path: {}", self.path.display())?;
         if let Some(py_env) = &self.env {
-            write!(f, "{}", py_env)?;
+            write!(f, "{py_env}")?;
         }
         Ok(())
     }
@@ -141,7 +144,7 @@ mod tests {
 
         let project = DjangoProject::new(project_path.clone());
 
-        let display_str = format!("{}", project);
+        let display_str = format!("{project}");
 
         assert!(display_str.contains(&format!("Project path: {}", project_path.display())));
     }
 }

@@ -96,9 +96,8 @@ impl PythonEnvironment {
     }
 
     fn from_system_python() -> Option<Self> {
-        let python_path = match system::find_executable("python") {
-            Ok(p) => p,
-            Err(_) => return None,
+        let Ok(python_path) = system::find_executable("python") else {
+            return None;
         };
         let bin_dir = python_path.parent()?;
         let prefix = bin_dir.parent()?;

@@ -35,7 +35,7 @@ impl TemplateTags {
                 let library_name = if module_name.is_empty() {
                     "builtins".to_string()
                 } else {
-                    module_name.split('.').last().unwrap_or("").to_string()
+                    module_name.split('.').next_back().unwrap_or("").to_string()
                 };
 
                 tags.push(TemplateTag::new(tag_name, library_name, doc));
@@ -90,7 +90,7 @@ impl TemplateTag {
         &self.library
     }
 
-    pub fn doc(&self) -> &Option<String> {
-        &self.doc
+    pub fn doc(&self) -> Option<&String> {
+        self.doc.as_ref()
     }
 }

@@ -104,7 +104,7 @@ impl Store {
 
     #[allow(dead_code)]
     pub fn is_version_valid(&self, uri: &str, version: i32) -> bool {
-        self.get_version(uri).map_or(false, |v| v == version)
+        self.get_version(uri) == Some(version)
     }
 
     pub fn get_completions(
@@ -240,9 +240,10 @@ impl TextDocument {
     }
 
     pub fn get_template_tag_context(&self, position: Position) -> Option<TemplateTagContext> {
-        let line = self.get_line(position.line.try_into().ok()?)?;
-        let prefix = &line[..position.character.try_into().ok()?];
-        let rest_of_line = &line[position.character.try_into().ok()?..];
+        let line = self.get_line(position.line)?;
+        let char_pos: usize = position.character.try_into().ok()?;
+        let prefix = &line[..char_pos];
+        let rest_of_line = &line[char_pos..];
         let rest_trimmed = rest_of_line.trim_start();
 
         prefix.rfind("{%").map(|tag_start| {

@@ -13,3 +13,6 @@ toml = "0.8"
 [dev-dependencies]
 insta = { version = "1.42", features = ["yaml"] }
 tempfile = "3.19"
+
+[lints]
+workspace = true

@@ -45,7 +45,7 @@ impl LineOffsets {
     }
 
     pub fn position_to_line_col(&self, position: usize) -> (usize, usize) {
-        let position = position as u32;
+        let position = u32::try_from(position).unwrap_or_default();
         let line = match self.0.binary_search(&position) {
             Ok(exact_line) => exact_line, // Position is at start of this line
             Err(0) => 0,                  // Before first line start
@@ -108,19 +108,21 @@ impl Span {
         Self { start, length }
     }
 
-    pub fn start(&self) -> &u32 {
-        &self.start
+    #[allow(clippy::trivially_copy_pass_by_ref)]
+    pub fn start(&self) -> u32 {
+        self.start
     }
 
-    pub fn length(&self) -> &u32 {
-        &self.length
+    #[allow(clippy::trivially_copy_pass_by_ref)]
+    pub fn length(&self) -> u32 {
+        self.length
     }
 }
 
 impl From<Token> for Span {
     fn from(token: Token) -> Self {
         let start = token.start().unwrap_or(0);
-        let length = token.content().len() as u32;
+        let length = u32::try_from(token.content().len()).unwrap_or(0);
         Span::new(start, length)
     }
 }
@@ -201,14 +203,14 @@ mod tests {
             // Variable starts after newline + "{{"
             let (line, col) = nodelist
                 .line_offsets()
-                .position_to_line_col(*span.start() as usize);
+                .position_to_line_col(span.start() as usize);
             assert_eq!(
                 (line, col),
                 (2, 0),
                 "Variable should start at line 2, col 3"
             );
 
-            assert_eq!(*span.length(), 9, "Variable span should cover 'user.name'");
+            assert_eq!(span.length(), 9, "Variable span should cover 'user.name'");
         }
     }
 }

@@ -44,6 +44,7 @@ impl From<std::io::Error> for TemplateError {
 }
 
 impl TemplateError {
+    #[must_use]
     pub fn span(&self) -> Option<Span> {
         match self {
             TemplateError::Validation(AstError::InvalidTagStructure { span, .. }) => Some(*span),
@@ -51,6 +52,7 @@ impl TemplateError {
         }
     }
 
+    #[must_use]
     pub fn severity(&self) -> lsp_types::DiagnosticSeverity {
         match self {
             TemplateError::Lexer(_) | TemplateError::Parser(_) => {
@@ -61,6 +63,7 @@ impl TemplateError {
         }
     }
 
+    #[must_use]
     pub fn code(&self) -> &'static str {
         match self {
             TemplateError::Lexer(_) => "LEX",
@@ -74,7 +77,7 @@ impl TemplateError {
 
 pub fn to_lsp_diagnostic(error: &TemplateError, _source: &str) -> lsp_types::Diagnostic {
     let range = error.span().map_or_else(lsp_types::Range::default, |span| {
-        let start = lsp_types::Position::new(0, *span.start());
+        let start = lsp_types::Position::new(0, span.start());
         let end = lsp_types::Position::new(0, span.start() + span.length());
         lsp_types::Range::new(start, end)
     });

@@ -23,6 +23,7 @@ impl Lexer {
         }
     }
 
+    #[allow(clippy::too_many_lines)]
     pub fn tokenize(&mut self) -> Result<TokenStream, LexerError> {
         let mut tokens = TokenStream::default();
 
@@ -62,7 +63,7 @@ impl Lexer {
                         self.consume()?; // >
                         TokenType::HtmlTagClose(tag)
                     }
-                    '!' if self.matches("<!--")? => {
+                    '!' if self.matches("<!--") => {
                         self.consume_n(4)?; // <!--
                         let content = self.consume_until("-->")?;
                         self.consume_n(3)?; // -->
@@ -76,8 +77,8 @@ impl Lexer {
                             TokenType::ScriptTagOpen(tag)
                         } else if tag.starts_with("style") {
                             TokenType::StyleTagOpen(tag)
-                        } else if tag.ends_with("/") {
-                            TokenType::HtmlTagVoid(tag.trim_end_matches("/").to_string())
+                        } else if tag.ends_with('/') {
+                            TokenType::HtmlTagVoid(tag.trim_end_matches('/').to_string())
                        } else {
                            TokenType::HtmlTagOpen(tag)
                        }
@@ -168,22 +169,34 @@ impl Lexer {
     }
 
     #[allow(dead_code)]
-    fn peek_until(&self, end: &str) -> Result<bool, LexerError> {
+    fn peek_until(&self, end: &str) -> bool {
         let mut index = self.current;
         let end_chars: Vec<char> = end.chars().collect();
 
         while index < self.chars.len() {
             if self.chars[index..].starts_with(&end_chars) {
-                return Ok(true);
+                return true;
             }
             index += 1;
         }
-        Ok(false)
+        false
     }
 
+    #[allow(clippy::cast_sign_loss)]
     fn peek_at(&self, offset: isize) -> Result<char, LexerError> {
-        let index = self.current as isize + offset;
-        self.item_at(index as usize)
+        // Safely handle negative offsets
+        let index = if offset < 0 {
+            // Check if we would underflow
+            if self.current < offset.unsigned_abs() {
+                return Err(LexerError::AtBeginningOfSource);
+            }
+            self.current - offset.unsigned_abs()
+        } else {
+            // Safe addition since offset is positive
+            self.current + (offset as usize)
+        };
+        self.item_at(index)
     }
 
     fn item_at(&self, index: usize) -> Result<char, LexerError> {
@@ -193,19 +206,22 @@ impl Lexer {
             // much easier
             Ok('\0')
         } else {
-            Ok(self.source.chars().nth(index).unwrap())
+            self.source
+                .chars()
+                .nth(index)
+                .ok_or(LexerError::InvalidCharacterAccess)
         }
     }
 
-    fn matches(&mut self, pattern: &str) -> Result<bool, LexerError> {
+    fn matches(&mut self, pattern: &str) -> bool {
         let mut i = self.current;
         for c in pattern.chars() {
             if i >= self.chars.len() || self.chars[i] != c {
-                return Ok(false);
+                return false;
             }
             i += 1;
         }
-        Ok(true)
+        true
     }
 
     fn is_at_end(&self) -> bool {
@@ -310,7 +326,7 @@ mod tests {
 
     #[test]
     fn test_tokenize_comments() {
-        let source = r#"<!-- HTML comment -->
+        let source = r"<!-- HTML comment -->
 {# Django comment #}
 <script>
     // JS single line comment
@@ -319,7 +335,7 @@ mod tests {
 </script>
 <style>
     /* CSS comment */
-</style>"#;
+</style>";
         let mut lexer = Lexer::new(source);
         let tokens = lexer.tokenize().unwrap();
         insta::assert_yaml_snapshot!(tokens);
@@ -368,11 +384,11 @@ mod tests {
 
     #[test]
     fn test_tokenize_nested_delimiters() {
-        let source = r#"{{ user.name }}
+        let source = r"{{ user.name }}
 {% if true %}
 {# comment #}
 <!-- html comment -->
-<div>text</div>"#;
+<div>text</div>";
 
         assert!(Lexer::new(source).tokenize().is_ok());
     }
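
Aside, not part of the diff: the bounds-checked offset arithmetic introduced in peek_at above (and mirrored in the parser's peek_at below) can also be written with the standard library's usize::checked_add_signed, stable since Rust 1.66. A minimal standalone sketch:

// Apply a signed offset to an unsigned index, returning None instead of
// wrapping on underflow or overflow.
fn offset_index(current: usize, offset: isize) -> Option<usize> {
    current.checked_add_signed(offset)
}

fn main() {
    assert_eq!(offset_index(5, -2), Some(3));
    assert_eq!(offset_index(1, -3), None); // would underflow, like the AtBeginningOfSource case
}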

@@ -16,6 +16,7 @@ pub struct Parser {
 }
 
 impl Parser {
+    #[must_use]
     pub fn new(tokens: TokenStream) -> Self {
         Self {
             tokens,
@@ -36,7 +37,7 @@ impl Parser {
                 Err(err) => {
                     if !self.is_at_end() {
                         self.errors.push(err);
-                        self.synchronize()?
+                        self.synchronize()?;
                     }
                 }
             }
@@ -70,7 +71,7 @@ impl Parser {
         // Only treat Django comments as Comment nodes
         if open != "{#" {
             return self.parse_text();
-        };
+        }
 
         let token = self.peek_previous()?;
@@ -145,8 +146,9 @@ impl Parser {
         };
 
         let start = token.start().unwrap_or(0);
-        let offset = text.find(content.as_str()).unwrap_or(0) as u32;
-        let length = content.len() as u32;
+        let offset = u32::try_from(text.find(content.as_str()).unwrap_or(0))
+            .expect("Offset should fit in u32");
+        let length = u32::try_from(content.len()).expect("Content length should fit in u32");
         let span = Span::new(start + offset, length);
 
         Ok(Node::Text { content, span })
@@ -165,9 +167,21 @@ impl Parser {
         self.peek_at(-1)
     }
 
+    #[allow(clippy::cast_sign_loss)]
     fn peek_at(&self, offset: isize) -> Result<Token, ParserError> {
-        let index = self.current as isize + offset;
-        self.item_at(index as usize)
+        // Safely handle negative offsets
+        let index = if offset < 0 {
+            // Check if we would underflow
+            if self.current < offset.unsigned_abs() {
+                return Err(ParserError::stream_error(StreamError::BeforeStart));
+            }
+            self.current - offset.unsigned_abs()
+        } else {
+            // Safe addition since offset is positive
+            self.current + (offset as usize)
+        };
+        self.item_at(index)
     }
 
     fn item_at(&self, index: usize) -> Result<Token, ParserError> {
@@ -232,6 +246,7 @@ impl Parser {
 #[derive(Debug)]
 pub enum StreamError {
     AtBeginning,
+    BeforeStart,
     AtEnd,
     Empty,
     InvalidAccess,
@@ -501,7 +516,7 @@ mod tests {
         let mut parser = Parser::new(tokens);
         let (nodelist, errors) = parser.parse().unwrap();
         insta::assert_yaml_snapshot!(nodelist);
-        eprintln!("{:?}", errors);
+        eprintln!("{errors:?}");
         assert!(errors.is_empty());
     }
 }
@@ -636,7 +651,7 @@ mod tests {
         let (nodelist, errors) = parser.parse().unwrap();
 
         let offsets = nodelist.line_offsets();
-        eprintln!("{:?}", offsets);
+        eprintln!("{offsets:?}");
         assert_eq!(offsets.position_to_line_col(0), (1, 0)); // Start of line 1
         assert_eq!(offsets.position_to_line_col(6), (2, 0)); // Start of line 2
         assert!(errors.is_empty());

@@ -92,7 +92,7 @@ impl TagSpecs {
         Ok(TagSpecs(specs))
     }
 
-    /// Merge another TagSpecs into this one, with the other taking precedence
+    /// Merge another `TagSpecs` into this one, with the other taking precedence
     #[allow(dead_code)]
     pub fn merge(&mut self, other: TagSpecs) -> &mut Self {
         self.0.extend(other.0);
@@ -138,8 +138,7 @@ impl TagSpec {
                     is_spec_node = true;
                 } else {
                     return Err(format!(
-                        "Invalid prefix '{}' resulted in empty tag name component.",
-                        p
+                        "Invalid prefix '{p}' resulted in empty tag name component."
                     ));
                 }
             } else {
@@ -163,10 +162,10 @@ impl TagSpec {
         // Otherwise, if it's a table, recurse into its children.
         if !is_spec_node {
             if let Some(table) = value.as_table() {
-                for (key, inner_value) in table.iter() {
+                for (key, inner_value) in table {
                     let new_prefix = match prefix {
                         None => key.clone(),
-                        Some(p) => format!("{}.{}", p, key),
+                        Some(p) => format!("{p}.{key}"),
                     };
                     Self::extract_specs(inner_value, Some(&new_prefix), specs)?;
                 }
@@ -244,14 +243,13 @@ mod tests {
         ];
 
         for tag in expected_tags {
-            assert!(specs.get(tag).is_some(), "{} tag should be present", tag);
+            assert!(specs.get(tag).is_some(), "{tag} tag should be present");
         }
 
         for tag in missing_tags {
             assert!(
                 specs.get(tag).is_none(),
-                "{} tag should not be present yet",
-                tag
+                "{tag} tag should not be present yet"
             );
         }

@@ -22,7 +22,7 @@ pub enum TokenType {
 }
 
 impl TokenType {
-    pub fn len(&self) -> Option<usize> {
+    pub fn len(&self) -> usize {
         match self {
             TokenType::DjangoBlock(s)
             | TokenType::DjangoVariable(s)
@@ -33,17 +33,18 @@ impl TokenType {
             | TokenType::ScriptTagClose(s)
             | TokenType::StyleTagOpen(s)
             | TokenType::StyleTagClose(s)
-            | TokenType::Text(s) => Some(s.len()),
-            TokenType::Comment(content, _, _) => Some(content.len()),
-            TokenType::Whitespace(n) => Some(*n),
-            TokenType::Newline => Some(1),
-            TokenType::Eof => Some(0),
+            | TokenType::Text(s) => s.len(),
+            TokenType::Comment(content, _, _) => content.len(),
+            TokenType::Whitespace(n) => *n,
+            TokenType::Newline => 1,
+            TokenType::Eof => 0,
         }
     }
 }
 
 #[derive(Clone, Debug, Serialize, PartialEq)]
 pub struct Token {
+    #[allow(clippy::struct_field_names)]
     token_type: TokenType,
     line: usize,
     start: Option<usize>,
@@ -93,7 +94,7 @@ impl Token {
             | TokenType::StyleTagClose(s) => s.to_string(),
             TokenType::Whitespace(len) => " ".repeat(*len),
             TokenType::Newline => "\n".to_string(),
-            TokenType::Eof => "".to_string(),
+            TokenType::Eof => String::new(),
         }
     }
 
@@ -106,11 +107,12 @@ impl Token {
     }
 
     pub fn start(&self) -> Option<u32> {
-        self.start.map(|s| s as u32)
+        self.start
+            .map(|s| u32::try_from(s).expect("Start position should fit in u32"))
     }
 
-    pub fn length(&self) -> Option<u32> {
-        self.token_type.len().map(|l| l as u32)
+    pub fn length(&self) -> u32 {
+        u32::try_from(self.token_type.len()).expect("Token length should fit in u32")
     }
 
     pub fn is_token_type(&self, token_type: &TokenType) -> bool {

@@ -30,3 +30,6 @@ clap = { version = "4.5", features = ["derive"] }
 
 [build-dependencies]
 djls-dev = { workspace = true }
+
+[lints]
+workspace = true

@@ -1,3 +1,5 @@
+use std::fmt::Write;
+
 use anyhow::Result;
 use clap::Parser;
 
@@ -33,7 +35,7 @@ pub fn run(args: Vec<String>) -> Result<()> {
         Err(e) => {
             let mut msg = e.to_string();
             if let Some(source) = e.source() {
-                msg += &format!(", caused by {}", source);
+                let _ = write!(msg, ", caused by {source}");
             }
             Exit::error().with_message(msg).process_exit()
         }

@@ -36,7 +36,7 @@ impl From<ExitStatus> for i32 {
 impl fmt::Display for ExitStatus {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let msg = self.as_str();
-        write!(f, "{}", msg)
+        write!(f, "{msg}")
     }
 }
 
@@ -69,7 +69,7 @@ impl Exit {
     pub fn process_exit(self) -> ! {
         if let Some(message) = self.message {
-            println!("{}", message)
+            println!("{message}");
         }
 
         std::process::exit(self.status.as_raw())
     }
@@ -78,7 +78,7 @@ impl Exit {
 
     pub fn ok(self) -> Result<()> {
         match self.status {
             ExitStatus::Success => Ok(()),
-            _ => Err(self.into()),
+            ExitStatus::Error => Err(self.into()),
         }
     }
@@ -92,8 +92,8 @@ impl fmt::Display for Exit {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let status_str = self.status.as_str();
         match &self.message {
-            Some(msg) => write!(f, "{}: {}", status_str, msg),
-            None => write!(f, "{}", status_str),
+            Some(msg) => write!(f, "{status_str}: {msg}"),
+            None => write!(f, "{status_str}"),
         }
     }
 }

@@ -1,7 +1,7 @@
-/// PyO3 entrypoint for the Django Language Server CLI.
+/// `PyO3` entrypoint for the Django Language Server CLI.
 ///
-/// This module provides a Python interface using PyO3 to solve Python runtime
-/// interpreter linking issues. The PyO3 approach avoids complexities with
+/// This module provides a Python interface using `PyO3` to solve Python runtime
+/// interpreter linking issues. The `PyO3` approach avoids complexities with
 /// static/dynamic linking when building binaries that interact with Python.
 mod args;
 mod cli;

@@ -1,7 +1,7 @@
 /// Binary interface for local development only.
 ///
 /// This binary exists for development and testing with `cargo run`.
-/// The production CLI is distributed through the PyO3 interface in lib.rs.
+/// The production CLI is distributed through the `PyO3` interface in lib.rs.
 mod args;
 mod cli;
 mod commands;