use nightly rustfmt and sort imports (#145)

Author: Josh Thomas (committed by GitHub)
Date: 2025-05-14 00:46:55 -05:00
Commit: e87c917cb6 (parent 00140c58ca)
Signature: GPG key ID B5690EEEBB952194 (no known key found for this signature in database)
25 changed files with 143 additions and 60 deletions
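
The sorted, one-import-per-line style in the hunks below matches two rustfmt
options that are still nightly-only. A minimal rustfmt.toml sketch that would
reproduce it (inferred from the diff; the repository's actual config may differ):

    # rustfmt.toml -- both options require nightly rustfmt
    imports_granularity = "Item"        # split `use a::{b, c};` into one `use` per item
    group_imports = "StdExternalCrate"  # order import blocks: std, external crates, crate-local

With that in place, formatting runs through the nightly toolchain, e.g.
cargo +nightly fmt locally and cargo +nightly fmt --check in CI.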

@@ -1,7 +1,10 @@
-use crate::tokens::{Token, TokenStream, TokenType};
 use serde::Serialize;
 use thiserror::Error;
 
+use crate::tokens::Token;
+use crate::tokens::TokenStream;
+use crate::tokens::TokenType;
+
 #[derive(Clone, Debug, Default, Serialize)]
 pub struct Ast {
     nodelist: Vec<Node>,

@@ -1,10 +1,12 @@
-use crate::ast::{AstError, Span};
-use crate::lexer::LexerError;
-use crate::parser::ParserError;
 use serde::Serialize;
 use thiserror::Error;
 use tower_lsp_server::lsp_types;
+
+use crate::ast::AstError;
+use crate::ast::Span;
+use crate::lexer::LexerError;
+use crate::parser::ParserError;
 
 #[derive(Debug, Error, Serialize)]
 pub enum TemplateError {
     #[error("Lexer error: {0}")]

@@ -1,6 +1,9 @@
-use crate::tokens::{Token, TokenStream, TokenType};
 use thiserror::Error;
 
+use crate::tokens::Token;
+use crate::tokens::TokenStream;
+use crate::tokens::TokenType;
+
 pub struct Lexer {
     source: String,
     chars: Vec<char>,

@@ -6,10 +6,12 @@ mod tagspecs;
 mod tokens;
 
 use ast::Ast;
-pub use error::{to_lsp_diagnostic, QuickFix, TemplateError};
-
+pub use error::to_lsp_diagnostic;
+pub use error::QuickFix;
+pub use error::TemplateError;
 use lexer::Lexer;
-pub use parser::{Parser, ParserError};
+pub use parser::Parser;
+pub use parser::ParserError;
 
 /// Parses a Django template and returns the AST and any parsing errors.
 ///

@@ -1,8 +1,14 @@
-use crate::ast::{Ast, AstError, Node, Span};
-use crate::lexer::LexerError;
-use crate::tokens::{Token, TokenStream, TokenType};
 use thiserror::Error;
 
+use crate::ast::Ast;
+use crate::ast::AstError;
+use crate::ast::Node;
+use crate::ast::Span;
+use crate::lexer::LexerError;
+use crate::tokens::Token;
+use crate::tokens::TokenStream;
+use crate::tokens::TokenType;
+
 pub struct Parser {
     tokens: TokenStream,
     current: usize,

@@ -1,8 +1,10 @@
-use anyhow::Result;
-use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 use std::fs;
 use std::path::Path;
+
+use anyhow::Result;
+use serde::Deserialize;
+use serde::Serialize;
 use thiserror::Error;
 use toml::Value;
 
@@ -184,9 +186,10 @@ pub struct EndTag {
 #[cfg(test)]
 mod tests {
-    use super::*;
     use std::fs;
 
+    use super::*;
+
     #[test]
     fn test_can_load_builtins() -> Result<(), anyhow::Error> {
         let specs = TagSpecs::load_builtin_specs()?;

@@ -1,5 +1,7 @@
+use std::ops::Deref;
+use std::ops::DerefMut;
+
 use serde::Serialize;
-use std::ops::{Deref, DerefMut};
 
 #[derive(Clone, Debug, Serialize, PartialEq)]
 pub enum TokenType {