mirror of
https://github.com/joshuadavidthomas/django-language-server.git
synced 2025-07-07 20:55:02 +00:00
add pedantic clippy setting and fix/allow warnings (#147)
Some checks are pending
lint / pre-commit (push) Waiting to run
lint / rustfmt (push) Waiting to run
lint / clippy (push) Waiting to run
lint / cargo-check (push) Waiting to run
release / build (push) Waiting to run
release / test (push) Waiting to run
release / release (push) Blocked by required conditions
test / generate-matrix (push) Waiting to run
test / Python , Django () (push) Blocked by required conditions
test / tests (push) Blocked by required conditions
zizmor 🌈 / zizmor latest via PyPI (push) Waiting to run
This commit is contained in:
parent
e87c917cb6
commit
d677aacf7c
24 changed files with 180 additions and 113 deletions
.github/workflows/lint.yml (vendored): 47 changes
@@ -26,12 +26,6 @@ jobs:
         with:
           persist-credentials: false
 
-      - name: Install nightly toolchain for rustfmt
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b
-        with:
-          toolchain: nightly
-          components: rustfmt
-
       - name: Install uv
         uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
         with:
@@ -49,3 +43,44 @@ jobs:
             --all-files \
            --show-diff-on-failure \
            --color always
+
+  rustfmt:
+    runs-on: ubuntu-24.04
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+
+      - uses: actions-rust-lang/setup-rust-toolchain@9d7e65c320fdb52dcd45ffaa68deb6c02c8754d9
+        with:
+          toolchain: nightly
+          components: rustfmt
+
+      - name: Run rustfmt
+        run: cargo +nightly fmt --all -- --check
+
+  clippy:
+    runs-on: ubuntu-24.04
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+
+      - uses: actions-rust-lang/setup-rust-toolchain@9d7e65c320fdb52dcd45ffaa68deb6c02c8754d9
+        with:
+          components: clippy
+
+      - name: Run clippy
+        run: cargo clippy --all-targets --all-features -- -D warnings
+
+  cargo-check:
+    runs-on: ubuntu-24.04
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+
+      - uses: actions-rust-lang/setup-rust-toolchain@9d7e65c320fdb52dcd45ffaa68deb6c02c8754d9
+
+      - name: Run cargo check
+        run: cargo check --all-targets --all-features

@@ -1,6 +1,3 @@
-default_language_version:
-  rust: "1.86"
-
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v5.0.0
@@ -21,30 +18,3 @@ repos:
     rev: v1.6.0
     hooks:
       - id: zizmor
-  - repo: local
-    hooks:
-      - id: fmt
-        name: cargo fmt
-        description: format files with "cargo fmt"
-        types:
-          - rust
-        language: rust
-        entry: cargo +nightly fmt
-        args:
-          - --
-      - id: check
-        name: cargo check
-        description: check the package for errors with "cargo check"
-        types:
-          - rust
-        language: rust
-        entry: cargo check
-        pass_filenames: false
-      - id: clippy
-        name: cargo clippy
-        description: check the package with "cargo clippy"
-        types:
-          - rust
-        language: rust
-        entry: cargo clippy
-        pass_filenames: false

@@ -23,6 +23,10 @@ thiserror = "2.0"
 tokio = { version = "1.42", features = ["full"] }
 tower-lsp-server = { version = "0.21", features = ["proposed"] }
 
+[workspace.lints.clippy]
+pedantic = { level = "warn", priority = -1 }
+missing_errors_doc = "allow"
+
 [profile.dev.package]
 insta.opt-level = 3
 similar.opt-level = 3

@@ -14,3 +14,6 @@ toml = "0.8"
 
 [dev-dependencies]
 tempfile = "3.19"
+
+[lints]
+workspace = true

@@ -79,10 +79,12 @@ impl Settings {
         Ok(settings)
     }
 
+    #[must_use]
     pub fn debug(&self) -> bool {
         self.debug
     }
 
+    #[must_use]
     pub fn venv_path(&self) -> Option<&str> {
         self.venv_path.as_deref()
     }

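The `#[must_use]` attributes added above are the usual answer to clippy's pedantic `must_use_candidate` lint: the compiler then warns when a caller discards the returned value. A minimal standalone sketch, with the struct trimmed to a single field for illustration:

struct Settings {
    debug: bool,
}

impl Settings {
    // With #[must_use], ignoring the returned value produces an
    // `unused_must_use` warning at the call site.
    #[must_use]
    fn debug(&self) -> bool {
        self.debug
    }
}

fn main() {
    let settings = Settings { debug: true };

    // settings.debug();  // would warn: unused return value that must be used

    if settings.debug() {
        println!("debug logging enabled");
    }
}
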
@@ -6,3 +6,6 @@ publish = false
 
 [dependencies]
 pyo3-build-config = { workspace = true, features = ["resolve-config"] }
+
+[lints]
+workspace = true

@@ -31,7 +31,7 @@ pub fn setup_python_linking() {
     // Only link libpython explicitly if we are NOT building an extension module.
     if !is_extension_module {
         if let Some(lib_name) = &config.lib_name {
-            println!("cargo:rustc-link-lib=dylib={}", lib_name);
+            println!("cargo:rustc-link-lib=dylib={lib_name}");
         } else {
             // Warn only if linking is actually needed but we can't find the lib name
             println!("cargo:warning=Python library name not found in config (needed for non-extension module
@@ -43,9 +43,9 @@ builds).");
     // These are needed for test executables and potential future standalone binaries,
     // and generally harmless for extension modules.
     if let Some(lib_dir) = &config.lib_dir {
-        println!("cargo:rustc-link-search=native={}", lib_dir);
+        println!("cargo:rustc-link-search=native={lib_dir}");
         #[cfg(not(windows))]
-        println!("cargo:rustc-link-arg=-Wl,-rpath,{}", lib_dir);
+        println!("cargo:rustc-link-arg=-Wl,-rpath,{lib_dir}");
     } else {
         // Warn only if linking is actually needed but we can't find the lib dir
         if !is_extension_module {

@@ -19,3 +19,6 @@ djls-dev = { workspace = true }
 
 [dev-dependencies]
 tempfile = { workspace = true }
+
+[lints]
+workspace = true

@@ -23,6 +23,7 @@ pub struct DjangoProject {
 }
 
 impl DjangoProject {
+    #[must_use]
     pub fn new(path: PathBuf) -> Self {
         Self {
             path,
@@ -64,17 +65,19 @@ impl DjangoProject {
                     Ok(())
                 }
                 Err(e) => {
-                    eprintln!("Failed to import Django: {}", e);
+                    eprintln!("Failed to import Django: {e}");
                     Err(e)
                 }
             }
         })
     }
 
+    #[must_use]
     pub fn template_tags(&self) -> Option<&TemplateTags> {
         self.template_tags.as_ref()
     }
 
+    #[must_use]
     pub fn path(&self) -> &Path {
         &self.path
     }
@@ -84,7 +87,7 @@ impl fmt::Display for DjangoProject {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         writeln!(f, "Project path: {}", self.path.display())?;
         if let Some(py_env) = &self.env {
-            write!(f, "{}", py_env)?;
+            write!(f, "{py_env}")?;
         }
         Ok(())
     }
@@ -141,7 +144,7 @@ mod tests {
 
         let project = DjangoProject::new(project_path.clone());
 
-        let display_str = format!("{}", project);
+        let display_str = format!("{project}");
         assert!(display_str.contains(&format!("Project path: {}", project_path.display())));
     }
 }

@@ -96,9 +96,8 @@ impl PythonEnvironment {
     }
 
     fn from_system_python() -> Option<Self> {
-        let python_path = match system::find_executable("python") {
-            Ok(p) => p,
-            Err(_) => return None,
+        let Ok(python_path) = system::find_executable("python") else {
+            return None;
         };
         let bin_dir = python_path.parent()?;
         let prefix = bin_dir.parent()?;

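The `let ... else` rewrite of `from_system_python` above is the shape clippy's `manual_let_else` lint favors over a `match` whose only job is to early-return. A small self-contained sketch of the same pattern; `find_executable` here is a hypothetical stand-in for illustration, not the `system::find_executable` from this repository:

use std::path::PathBuf;

// Hypothetical PATH lookup, used only for this sketch.
fn find_executable(name: &str) -> Result<PathBuf, String> {
    std::env::var_os("PATH")
        .into_iter()
        .flat_map(|paths| std::env::split_paths(&paths).collect::<Vec<_>>())
        .map(|dir| dir.join(name))
        .find(|candidate| candidate.is_file())
        .ok_or_else(|| format!("{name} not found on PATH"))
}

fn python_prefix() -> Option<PathBuf> {
    // Before: a match with `Ok(p) => p, Err(_) => return None`.
    // After (as in the diff): let-else keeps the happy path unindented.
    let Ok(python_path) = find_executable("python") else {
        return None;
    };
    let bin_dir = python_path.parent()?;
    Some(bin_dir.parent()?.to_path_buf())
}

fn main() {
    println!("{:?}", python_prefix());
}
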
@@ -35,7 +35,7 @@ impl TemplateTags {
             let library_name = if module_name.is_empty() {
                 "builtins".to_string()
             } else {
-                module_name.split('.').last().unwrap_or("").to_string()
+                module_name.split('.').next_back().unwrap_or("").to_string()
             };
 
             tags.push(TemplateTag::new(tag_name, library_name, doc));
@@ -90,7 +90,7 @@ impl TemplateTag {
         &self.library
     }
 
-    pub fn doc(&self) -> &Option<String> {
-        &self.doc
+    pub fn doc(&self) -> Option<&String> {
+        self.doc.as_ref()
     }
 }

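Swapping `.last()` for `.next_back()` above works because `str::split` returns a double-ended iterator, so the final segment can be taken directly from the back with no change in behavior. A tiny illustration with a made-up module path:

fn main() {
    let module_name = "django.templatetags.static";

    // Both yield the final dotted segment; next_back takes it from the
    // back instead of consuming the iterator from the front.
    let via_last = module_name.split('.').last().unwrap_or("");
    let via_next_back = module_name.split('.').next_back().unwrap_or("");

    assert_eq!(via_last, "static");
    assert_eq!(via_last, via_next_back);
}
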
@@ -104,7 +104,7 @@ impl Store {
 
     #[allow(dead_code)]
     pub fn is_version_valid(&self, uri: &str, version: i32) -> bool {
-        self.get_version(uri).map_or(false, |v| v == version)
+        self.get_version(uri) == Some(version)
     }
 
     pub fn get_completions(
@@ -240,9 +240,10 @@ impl TextDocument {
     }
 
     pub fn get_template_tag_context(&self, position: Position) -> Option<TemplateTagContext> {
-        let line = self.get_line(position.line.try_into().ok()?)?;
-        let prefix = &line[..position.character.try_into().ok()?];
-        let rest_of_line = &line[position.character.try_into().ok()?..];
+        let line = self.get_line(position.line)?;
+        let char_pos: usize = position.character.try_into().ok()?;
+        let prefix = &line[..char_pos];
+        let rest_of_line = &line[char_pos..];
         let rest_trimmed = rest_of_line.trim_start();
 
         prefix.rfind("{%").map(|tag_start| {

@@ -13,3 +13,6 @@ toml = "0.8"
 [dev-dependencies]
 insta = { version = "1.42", features = ["yaml"] }
 tempfile = "3.19"
+
+[lints]
+workspace = true

@@ -45,7 +45,7 @@ impl LineOffsets {
     }
 
     pub fn position_to_line_col(&self, position: usize) -> (usize, usize) {
-        let position = position as u32;
+        let position = u32::try_from(position).unwrap_or_default();
         let line = match self.0.binary_search(&position) {
             Ok(exact_line) => exact_line, // Position is at start of this line
             Err(0) => 0, // Before first line start
@@ -108,19 +108,21 @@ impl Span {
         Self { start, length }
     }
 
-    pub fn start(&self) -> &u32 {
-        &self.start
+    #[allow(clippy::trivially_copy_pass_by_ref)]
+    pub fn start(&self) -> u32 {
+        self.start
     }
 
-    pub fn length(&self) -> &u32 {
-        &self.length
+    #[allow(clippy::trivially_copy_pass_by_ref)]
+    pub fn length(&self) -> u32 {
+        self.length
     }
 }
 
 impl From<Token> for Span {
     fn from(token: Token) -> Self {
         let start = token.start().unwrap_or(0);
-        let length = token.content().len() as u32;
+        let length = u32::try_from(token.content().len()).unwrap_or(0);
         Span::new(start, length)
     }
 }
@@ -201,14 +203,14 @@ mod tests {
             // Variable starts after newline + "{{"
             let (line, col) = nodelist
                 .line_offsets()
-                .position_to_line_col(*span.start() as usize);
+                .position_to_line_col(span.start() as usize);
             assert_eq!(
                 (line, col),
                 (2, 0),
                 "Variable should start at line 2, col 3"
            );
 
-            assert_eq!(*span.length(), 9, "Variable span should cover 'user.name'");
+            assert_eq!(span.length(), 9, "Variable span should cover 'user.name'");
        }
    }
 }

@@ -44,6 +44,7 @@ impl From<std::io::Error> for TemplateError {
 }
 
 impl TemplateError {
+    #[must_use]
     pub fn span(&self) -> Option<Span> {
         match self {
             TemplateError::Validation(AstError::InvalidTagStructure { span, .. }) => Some(*span),
@@ -51,6 +52,7 @@ impl TemplateError {
         }
     }
 
+    #[must_use]
     pub fn severity(&self) -> lsp_types::DiagnosticSeverity {
         match self {
             TemplateError::Lexer(_) | TemplateError::Parser(_) => {
@@ -61,6 +63,7 @@ impl TemplateError {
         }
     }
 
+    #[must_use]
     pub fn code(&self) -> &'static str {
         match self {
             TemplateError::Lexer(_) => "LEX",
@@ -74,7 +77,7 @@ impl TemplateError {
 
 pub fn to_lsp_diagnostic(error: &TemplateError, _source: &str) -> lsp_types::Diagnostic {
     let range = error.span().map_or_else(lsp_types::Range::default, |span| {
-        let start = lsp_types::Position::new(0, *span.start());
+        let start = lsp_types::Position::new(0, span.start());
         let end = lsp_types::Position::new(0, span.start() + span.length());
         lsp_types::Range::new(start, end)
     });

@@ -23,6 +23,7 @@ impl Lexer {
         }
     }
 
+    #[allow(clippy::too_many_lines)]
     pub fn tokenize(&mut self) -> Result<TokenStream, LexerError> {
         let mut tokens = TokenStream::default();
 
@@ -62,7 +63,7 @@ impl Lexer {
                     self.consume()?; // >
                     TokenType::HtmlTagClose(tag)
                 }
-                '!' if self.matches("<!--")? => {
+                '!' if self.matches("<!--") => {
                     self.consume_n(4)?; // <!--
                     let content = self.consume_until("-->")?;
                     self.consume_n(3)?; // -->
@@ -76,8 +77,8 @@ impl Lexer {
                         TokenType::ScriptTagOpen(tag)
                     } else if tag.starts_with("style") {
                         TokenType::StyleTagOpen(tag)
-                    } else if tag.ends_with("/") {
-                        TokenType::HtmlTagVoid(tag.trim_end_matches("/").to_string())
+                    } else if tag.ends_with('/') {
+                        TokenType::HtmlTagVoid(tag.trim_end_matches('/').to_string())
                     } else {
                         TokenType::HtmlTagOpen(tag)
                     }
@@ -168,22 +169,34 @@ impl Lexer {
     }
 
     #[allow(dead_code)]
-    fn peek_until(&self, end: &str) -> Result<bool, LexerError> {
+    fn peek_until(&self, end: &str) -> bool {
         let mut index = self.current;
         let end_chars: Vec<char> = end.chars().collect();
 
         while index < self.chars.len() {
             if self.chars[index..].starts_with(&end_chars) {
-                return Ok(true);
+                return true;
             }
             index += 1;
         }
-        Ok(false)
+        false
     }
 
+    #[allow(clippy::cast_sign_loss)]
     fn peek_at(&self, offset: isize) -> Result<char, LexerError> {
-        let index = self.current as isize + offset;
-        self.item_at(index as usize)
+        // Safely handle negative offsets
+        let index = if offset < 0 {
+            // Check if we would underflow
+            if self.current < offset.unsigned_abs() {
+                return Err(LexerError::AtBeginningOfSource);
+            }
+            self.current - offset.unsigned_abs()
+        } else {
+            // Safe addition since offset is positive
+            self.current + (offset as usize)
+        };
+
+        self.item_at(index)
     }
 
     fn item_at(&self, index: usize) -> Result<char, LexerError> {
@@ -193,19 +206,22 @@ impl Lexer {
             // much easier
             Ok('\0')
         } else {
-            Ok(self.source.chars().nth(index).unwrap())
+            self.source
+                .chars()
+                .nth(index)
+                .ok_or(LexerError::InvalidCharacterAccess)
         }
     }
 
-    fn matches(&mut self, pattern: &str) -> Result<bool, LexerError> {
+    fn matches(&mut self, pattern: &str) -> bool {
         let mut i = self.current;
         for c in pattern.chars() {
             if i >= self.chars.len() || self.chars[i] != c {
-                return Ok(false);
+                return false;
             }
             i += 1;
         }
-        Ok(true)
+        true
     }
 
     fn is_at_end(&self) -> bool {
@@ -310,7 +326,7 @@ mod tests {
 
     #[test]
     fn test_tokenize_comments() {
-        let source = r#"<!-- HTML comment -->
+        let source = r"<!-- HTML comment -->
 {# Django comment #}
 <script>
     // JS single line comment
@@ -319,7 +335,7 @@ mod tests {
 </script>
 <style>
     /* CSS comment */
-</style>"#;
+</style>";
         let mut lexer = Lexer::new(source);
         let tokens = lexer.tokenize().unwrap();
         insta::assert_yaml_snapshot!(tokens);
@@ -368,11 +384,11 @@ mod tests {
 
     #[test]
     fn test_tokenize_nested_delimiters() {
-        let source = r#"{{ user.name }}
+        let source = r"{{ user.name }}
 {% if true %}
 {# comment #}
 <!-- html comment -->
-<div>text</div>"#;
+<div>text</div>";
         assert!(Lexer::new(source).tokenize().is_ok());
     }
 

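The reworked `peek_at` above (the parser below gets the same treatment) replaces a signed cast with explicit underflow handling when mixing an `isize` offset with a `usize` cursor. A compact standalone sketch of that pattern; `PeekError` and `index_at` are illustrative names, not items from this crate:

#[derive(Debug)]
enum PeekError {
    BeforeStart,
    PastEnd,
}

// Resolve a signed offset against an unsigned cursor without wrapping.
fn index_at(current: usize, offset: isize, len: usize) -> Result<usize, PeekError> {
    let index = if offset < 0 {
        // Negative offsets fail if they reach back past the start.
        current
            .checked_sub(offset.unsigned_abs())
            .ok_or(PeekError::BeforeStart)?
    } else {
        // For non-negative offsets, unsigned_abs is just the value itself.
        current + offset.unsigned_abs()
    };
    if index < len {
        Ok(index)
    } else {
        Err(PeekError::PastEnd)
    }
}

fn main() {
    assert_eq!(index_at(3, -1, 10).unwrap(), 2);
    assert!(matches!(index_at(0, -1, 10), Err(PeekError::BeforeStart)));
    assert!(matches!(index_at(9, 5, 10), Err(PeekError::PastEnd)));
}
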
@@ -16,6 +16,7 @@ pub struct Parser {
 }
 
 impl Parser {
+    #[must_use]
     pub fn new(tokens: TokenStream) -> Self {
         Self {
             tokens,
@@ -36,7 +37,7 @@ impl Parser {
             Err(err) => {
                 if !self.is_at_end() {
                     self.errors.push(err);
-                    self.synchronize()?
+                    self.synchronize()?;
                 }
             }
         }
@@ -70,7 +71,7 @@ impl Parser {
         // Only treat Django comments as Comment nodes
         if open != "{#" {
             return self.parse_text();
-        };
+        }
 
         let token = self.peek_previous()?;
 
@@ -145,8 +146,9 @@ impl Parser {
         };
 
         let start = token.start().unwrap_or(0);
-        let offset = text.find(content.as_str()).unwrap_or(0) as u32;
-        let length = content.len() as u32;
+        let offset = u32::try_from(text.find(content.as_str()).unwrap_or(0))
+            .expect("Offset should fit in u32");
+        let length = u32::try_from(content.len()).expect("Content length should fit in u32");
         let span = Span::new(start + offset, length);
 
         Ok(Node::Text { content, span })
@@ -165,9 +167,21 @@ impl Parser {
         self.peek_at(-1)
     }
 
+    #[allow(clippy::cast_sign_loss)]
     fn peek_at(&self, offset: isize) -> Result<Token, ParserError> {
-        let index = self.current as isize + offset;
-        self.item_at(index as usize)
+        // Safely handle negative offsets
+        let index = if offset < 0 {
+            // Check if we would underflow
+            if self.current < offset.unsigned_abs() {
+                return Err(ParserError::stream_error(StreamError::BeforeStart));
+            }
+            self.current - offset.unsigned_abs()
+        } else {
+            // Safe addition since offset is positive
+            self.current + (offset as usize)
+        };
+
+        self.item_at(index)
     }
 
     fn item_at(&self, index: usize) -> Result<Token, ParserError> {
@@ -232,6 +246,7 @@ impl Parser {
 #[derive(Debug)]
 pub enum StreamError {
     AtBeginning,
+    BeforeStart,
     AtEnd,
     Empty,
     InvalidAccess,
@@ -501,7 +516,7 @@ mod tests {
         let mut parser = Parser::new(tokens);
         let (nodelist, errors) = parser.parse().unwrap();
         insta::assert_yaml_snapshot!(nodelist);
-        eprintln!("{:?}", errors);
+        eprintln!("{errors:?}");
         assert!(errors.is_empty());
     }
 }
@@ -636,7 +651,7 @@ mod tests {
         let (nodelist, errors) = parser.parse().unwrap();
 
         let offsets = nodelist.line_offsets();
-        eprintln!("{:?}", offsets);
+        eprintln!("{offsets:?}");
         assert_eq!(offsets.position_to_line_col(0), (1, 0)); // Start of line 1
         assert_eq!(offsets.position_to_line_col(6), (2, 0)); // Start of line 2
         assert!(errors.is_empty());

@@ -92,7 +92,7 @@ impl TagSpecs {
         Ok(TagSpecs(specs))
     }
 
-    /// Merge another TagSpecs into this one, with the other taking precedence
+    /// Merge another `TagSpecs` into this one, with the other taking precedence
     #[allow(dead_code)]
     pub fn merge(&mut self, other: TagSpecs) -> &mut Self {
         self.0.extend(other.0);
@@ -138,8 +138,7 @@ impl TagSpec {
                 is_spec_node = true;
             } else {
                 return Err(format!(
-                    "Invalid prefix '{}' resulted in empty tag name component.",
-                    p
+                    "Invalid prefix '{p}' resulted in empty tag name component."
                 ));
             }
         } else {
@@ -163,10 +162,10 @@ impl TagSpec {
         // Otherwise, if it's a table, recurse into its children.
         if !is_spec_node {
             if let Some(table) = value.as_table() {
-                for (key, inner_value) in table.iter() {
+                for (key, inner_value) in table {
                     let new_prefix = match prefix {
                         None => key.clone(),
-                        Some(p) => format!("{}.{}", p, key),
+                        Some(p) => format!("{p}.{key}"),
                     };
                     Self::extract_specs(inner_value, Some(&new_prefix), specs)?;
                 }
@@ -244,14 +243,13 @@ mod tests {
         ];
 
         for tag in expected_tags {
-            assert!(specs.get(tag).is_some(), "{} tag should be present", tag);
+            assert!(specs.get(tag).is_some(), "{tag} tag should be present");
        }
 
        for tag in missing_tags {
            assert!(
                specs.get(tag).is_none(),
-                "{} tag should not be present yet",
-                tag
+                "{tag} tag should not be present yet"
            );
        }
 

@@ -22,7 +22,7 @@ pub enum TokenType {
 }
 
 impl TokenType {
-    pub fn len(&self) -> Option<usize> {
+    pub fn len(&self) -> usize {
         match self {
             TokenType::DjangoBlock(s)
             | TokenType::DjangoVariable(s)
@@ -33,17 +33,18 @@ impl TokenType {
             | TokenType::ScriptTagClose(s)
             | TokenType::StyleTagOpen(s)
             | TokenType::StyleTagClose(s)
-            | TokenType::Text(s) => Some(s.len()),
-            TokenType::Comment(content, _, _) => Some(content.len()),
-            TokenType::Whitespace(n) => Some(*n),
-            TokenType::Newline => Some(1),
-            TokenType::Eof => Some(0),
+            | TokenType::Text(s) => s.len(),
+            TokenType::Comment(content, _, _) => content.len(),
+            TokenType::Whitespace(n) => *n,
+            TokenType::Newline => 1,
+            TokenType::Eof => 0,
         }
     }
 }
 
 #[derive(Clone, Debug, Serialize, PartialEq)]
 pub struct Token {
+    #[allow(clippy::struct_field_names)]
     token_type: TokenType,
     line: usize,
     start: Option<usize>,
@@ -93,7 +94,7 @@ impl Token {
             | TokenType::StyleTagClose(s) => s.to_string(),
             TokenType::Whitespace(len) => " ".repeat(*len),
             TokenType::Newline => "\n".to_string(),
-            TokenType::Eof => "".to_string(),
+            TokenType::Eof => String::new(),
         }
     }
 
@@ -106,11 +107,12 @@ impl Token {
     }
 
     pub fn start(&self) -> Option<u32> {
-        self.start.map(|s| s as u32)
+        self.start
+            .map(|s| u32::try_from(s).expect("Start position should fit in u32"))
     }
 
-    pub fn length(&self) -> Option<u32> {
-        self.token_type.len().map(|l| l as u32)
+    pub fn length(&self) -> u32 {
+        u32::try_from(self.token_type.len()).expect("Token length should fit in u32")
     }
 
     pub fn is_token_type(&self, token_type: &TokenType) -> bool {

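The move from `as u32` to `u32::try_from(...)` in `Token::start` and `Token::length` above is the usual answer to clippy's pedantic `cast_possible_truncation` lint: make the narrowing explicit instead of letting `as` wrap silently. A short illustration with arbitrary values:

fn main() {
    let len: usize = 9;

    // `as` never fails; it silently truncates values that do not fit.
    let lossy = len as u32;

    // try_from surfaces the failure case instead.
    let checked = u32::try_from(len).expect("length should fit in u32");
    assert_eq!(lossy, checked);

    // A value too large for u32 becomes an error rather than a wrapped number.
    let too_big: u64 = u64::from(u32::MAX) + 1;
    assert!(u32::try_from(too_big).is_err());
}
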
@@ -30,3 +30,6 @@ clap = { version = "4.5", features = ["derive"] }
 
 [build-dependencies]
 djls-dev = { workspace = true }
+
+[lints]
+workspace = true

@@ -1,3 +1,5 @@
+use std::fmt::Write;
+
 use anyhow::Result;
 use clap::Parser;
 
@@ -33,7 +35,7 @@ pub fn run(args: Vec<String>) -> Result<()> {
         Err(e) => {
             let mut msg = e.to_string();
             if let Some(source) = e.source() {
-                msg += &format!(", caused by {}", source);
+                let _ = write!(msg, ", caused by {source}");
            }
            Exit::error().with_message(msg).process_exit()
        }

@@ -36,7 +36,7 @@ impl From<ExitStatus> for i32 {
 impl fmt::Display for ExitStatus {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let msg = self.as_str();
-        write!(f, "{}", msg)
+        write!(f, "{msg}")
     }
 }
 
@@ -69,7 +69,7 @@ impl Exit {
 
     pub fn process_exit(self) -> ! {
         if let Some(message) = self.message {
-            println!("{}", message)
+            println!("{message}");
         }
         std::process::exit(self.status.as_raw())
     }
@@ -78,7 +78,7 @@ impl Exit {
     pub fn ok(self) -> Result<()> {
         match self.status {
             ExitStatus::Success => Ok(()),
-            _ => Err(self.into()),
+            ExitStatus::Error => Err(self.into()),
         }
     }
 
@@ -92,8 +92,8 @@ impl fmt::Display for Exit {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let status_str = self.status.as_str();
         match &self.message {
-            Some(msg) => write!(f, "{}: {}", status_str, msg),
-            None => write!(f, "{}", status_str),
+            Some(msg) => write!(f, "{status_str}: {msg}"),
+            None => write!(f, "{status_str}"),
         }
     }
 }

@@ -1,7 +1,7 @@
-/// PyO3 entrypoint for the Django Language Server CLI.
+/// `PyO3` entrypoint for the Django Language Server CLI.
 ///
-/// This module provides a Python interface using PyO3 to solve Python runtime
-/// interpreter linking issues. The PyO3 approach avoids complexities with
+/// This module provides a Python interface using `PyO3` to solve Python runtime
+/// interpreter linking issues. The `PyO3` approach avoids complexities with
 /// static/dynamic linking when building binaries that interact with Python.
 mod args;
 mod cli;

@@ -1,7 +1,7 @@
 /// Binary interface for local development only.
 ///
 /// This binary exists for development and testing with `cargo run`.
-/// The production CLI is distributed through the PyO3 interface in lib.rs.
+/// The production CLI is distributed through the `PyO3` interface in lib.rs.
 mod args;
 mod cli;
 mod commands;