Mirror of https://github.com/joshuadavidthomas/django-language-server.git (synced 2025-09-18 16:20:24 +00:00)

Commit 564c6824ab ("updates")
Parent 8fdf5ca038
3 changed files with 78 additions and 43 deletions
crates/djls-template-ast/src/parser.rs

@@ -1,4 +1,4 @@
-use crate::ast::{Assignment, Ast, AstError, Block, DjangoFilter, LineOffsets, Node, Span, Tag};
+use crate::ast::{Ast, AstError, Block, DjangoFilter, LineOffsets, Node, Span, Tag};
 use crate::tagspecs::{TagSpec, TagType};
 use crate::tokens::{Token, TokenStream, TokenType};
 use thiserror::Error;
@@ -52,21 +52,13 @@ impl Parser {
     }
 
     fn next_node(&mut self) -> Result<Node, ParserError> {
-        let token = self.peek()?;
+        let token = self.consume()?;
 
         match token.token_type() {
-            TokenType::Comment(content, start, end) => {
-                self.consume()?;
-                self.parse_comment(content, start, end.as_deref())
-            }
+            TokenType::Comment(content, open, _) => self.parse_comment(content, open),
             TokenType::Eof => Err(ParserError::Ast(AstError::StreamError("AtEnd".to_string()))),
-            TokenType::DjangoBlock(content) => {
-                self.consume()?;
-                self.parse_django_block(content)
-            }
-            TokenType::DjangoVariable(content) => {
-                self.consume()?;
-                self.parse_django_variable(content)
-            }
+            TokenType::DjangoBlock(content) => self.parse_django_block(content),
+            TokenType::DjangoVariable(content) => self.parse_django_variable(content),
             TokenType::HtmlTagClose(_)
             | TokenType::HtmlTagOpen(_)
             | TokenType::HtmlTagVoid(_)
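Note: the key change in next_node is that the token is consumed once, up front, instead of peeked and then consumed separately inside every match arm; helpers that need position data recover the just-consumed token via peek_previous(). A minimal, self-contained sketch of that shape follows. Tok, Stream, and the string "nodes" are simplified stand-ins for illustration, not the crate's real Parser/TokenStream/Node types.

// Sketch only: simplified stand-ins for the crate's parser types.
#[derive(Debug, Clone)]
enum Tok {
    Comment(String, String), // (content, opening delimiter)
    Var(String),
    Text(String),
    Eof,
}

struct Stream {
    tokens: Vec<Tok>,
    current: usize,
}

impl Stream {
    // Advance past the current token and return it.
    fn consume(&mut self) -> Option<Tok> {
        let tok = self.tokens.get(self.current).cloned()?;
        self.current += 1;
        Some(tok)
    }

    // The token most recently consumed (available to helpers for position info).
    fn peek_previous(&self) -> Option<&Tok> {
        self.current.checked_sub(1).and_then(|i| self.tokens.get(i))
    }
}

// Consume exactly once up front, then dispatch; no arm repeats the consume call.
fn next_node(stream: &mut Stream) -> Option<String> {
    match stream.consume()? {
        Tok::Comment(content, open) if open == "{#" => Some(format!("Comment({content})")),
        Tok::Comment(content, _) => Some(format!("Text({content})")),
        Tok::Var(content) => Some(format!("Variable({content})")),
        Tok::Text(content) => Some(format!("Text({content})")),
        Tok::Eof => None,
    }
}

fn main() {
    let mut stream = Stream {
        tokens: vec![
            Tok::Comment("note".into(), "{#".into()),
            Tok::Var("user.name".into()),
            Tok::Eof,
        ],
        current: 0,
    };
    while let Some(node) = next_node(&mut stream) {
        println!("{node}");
    }
    assert!(stream.peek_previous().is_some());
}

Consuming up front keeps each arm a single expression, which is what lets the Comment, DjangoBlock, and DjangoVariable arms collapse to one-liners in this hunk.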
@@ -76,38 +68,31 @@ impl Parser {
             | TokenType::StyleTagClose(_)
             | TokenType::StyleTagOpen(_)
             | TokenType::Text(_)
-            | TokenType::Whitespace(_) => {
-                self.consume()?;
-                self.parse_text()
-            }
+            | TokenType::Whitespace(_) => self.parse_text(),
         }
     }
 
-    fn parse_comment(
-        &mut self,
-        content: &str,
-        start: &str,
-        end: Option<&str>,
-    ) -> Result<Node, ParserError> {
-        let token = self.peek_previous()?;
-        let start_pos = token.start().unwrap_or(0);
-
+    fn parse_comment(&mut self, content: &str, open: &str) -> Result<Node, ParserError> {
         // Only treat Django comments as Comment nodes
-        if start == "{#" && end == Some("#}") {
+        if open != "{#" {
+            return self.parse_text();
+        };
+
+        let token = self.peek_previous()?;
+        let start = token.start().unwrap_or(0);
+
         Ok(Node::Comment {
             content: content.to_string(),
-            span: Span::new(start_pos, token.token_type().len().unwrap_or(0) as u32),
+            span: Span::new(start, token.token_type().len().unwrap_or(0) as u32),
         })
-        } else {
-            self.parse_text()
-        }
     }
 
     fn parse_django_block(&mut self, content: &str) -> Result<Node, ParserError> {
         let token = self.peek_previous()?;
-        let start_pos = token.start().unwrap_or(0);
-        let total_length = token.length().unwrap_or(0);
-        let span = Span::new(start_pos, total_length);
+        let start = token.start().unwrap_or(0);
+        let length = token.length().unwrap_or(0);
+
+        let span = Span::new(start, length);
 
         let bits: Vec<String> = content.split_whitespace().map(String::from).collect();
         let tag_name = bits.first().ok_or(ParserError::EmptyTag)?.clone();
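Note: parse_comment is restructured as a guard clause. Instead of nesting the Comment construction inside if start == "{#" && end == Some("#}") with an else branch, the new version returns self.parse_text() early whenever the opening delimiter is not "{#". A tiny sketch of the same shape, using plain strings in place of the crate's Node type; classify_comment is a made-up name for illustration only.

// Guard-clause sketch; Node/Span are replaced by plain strings here.
fn classify_comment(content: &str, open: &str) -> String {
    // Early return: anything that is not a Django "{# ... #}" comment is plain text.
    if open != "{#" {
        return format!("Text({content})");
    }

    format!("Comment({content})")
}

fn main() {
    assert_eq!(classify_comment("note", "{#"), "Comment(note)");
    assert_eq!(classify_comment("<!-- html -->", "<!--"), "Text(<!-- html -->)");
    println!("guard-clause sketch ok");
}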
@@ -278,14 +263,14 @@ impl Parser {
     }
 
     fn parse_text(&mut self) -> Result<Node, ParserError> {
-        let start_token = self.peek_previous()?;
-        let start_pos = start_token.start().unwrap_or(0);
+        let token = self.peek_previous()?;
+        let start = token.start().unwrap_or(0);
 
-        if start_token.token_type() == &TokenType::Newline {
+        if token.token_type() == &TokenType::Newline {
             return self.next_node();
         }
 
-        let mut text = start_token.token_type().to_string();
+        let mut text = token.token_type().to_string();
 
         while let Ok(token) = self.peek() {
             match token.token_type() {
@@ -311,7 +296,7 @@ impl Parser {
 
         Ok(Node::Text {
             content,
-            span: Span::new(start_pos + offset, length),
+            span: Span::new(start + offset, length),
         })
     }
 
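Note: the renames in parse_text (start_token to token, start_pos to start) are mechanical, but the span construction is worth spelling out: the Text node's span starts at the token's start plus an offset, which presumably accounts for leading whitespace trimmed from the accumulated text. The stand-in below shows that reading only; the real Span lives in crate::ast, and the real offset/length computation is outside this hunk, so treat every name here as hypothetical.

// Hypothetical stand-in for crate::ast::Span, which this diff does not show.
#[derive(Debug, PartialEq)]
struct Span {
    start: u32,
    length: u32,
}

impl Span {
    fn new(start: u32, length: u32) -> Self {
        Span { start, length }
    }
}

// One plausible reading of "start + offset": shift the span start by the
// amount of leading whitespace trimmed from the raw text.
fn text_span(raw: &str, start: u32) -> (String, Span) {
    let trimmed = raw.trim_start();
    let offset = (raw.len() - trimmed.len()) as u32;
    let content = trimmed.trim_end().to_string();
    let length = content.len() as u32;
    (content, Span::new(start + offset, length))
}

fn main() {
    let (content, span) = text_span("   hello", 10);
    assert_eq!(content, "hello");
    assert_eq!(span, Span::new(13, 5));
    println!("{content:?} at {span:?}");
}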
@@ -445,8 +430,19 @@ mod tests {
 
     mod django {
         use super::*;
 
         #[test]
         fn test_parse_django_variable() {
+            let source = "{{ user.name }}";
+            let tokens = Lexer::new(source).tokenize().unwrap();
+            let mut parser = Parser::new(tokens);
+            let (ast, errors) = parser.parse().unwrap();
+            insta::assert_yaml_snapshot!(ast);
+            assert!(errors.is_empty());
+        }
+
+        #[test]
+        fn test_parse_django_variable_with_filter() {
             let source = "{{ user.name|title }}";
             let tokens = Lexer::new(source).tokenize().unwrap();
             let mut parser = Parser::new(tokens);
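Note: the new tests lean on insta's YAML snapshot assertions. The first run records a .snap file next to the source (the two new files below), later runs fail if the serialized AST drifts, and changes are typically reviewed with cargo insta review. A minimal, self-contained example of the same pattern; the Variable struct and the dependency lines are assumptions for illustration, not this crate's actual Ast or Cargo.toml.

// Assumed dev-dependencies, roughly:
//   insta = { version = "1", features = ["yaml"] }
//   serde = { version = "1", features = ["derive"] }

#[cfg(test)]
mod tests {
    use serde::Serialize;

    // Toy value standing in for the parser's Ast; anything Serialize works.
    #[derive(Serialize)]
    struct Variable {
        bits: Vec<String>,
        filters: Vec<String>,
    }

    #[test]
    fn snapshot_example() {
        let value = Variable {
            bits: vec!["user".into(), "name".into()],
            filters: vec![],
        };
        // First run writes a snapshot file; later runs compare against it.
        insta::assert_yaml_snapshot!(value);
    }
}

Each recorded snapshot carries the source path, the assertion line, and the serialized expression, which is exactly the header shape of the two YAML files that follow.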
New snapshot file (17 lines added), recorded by test_parse_django_variable:

@@ -0,0 +1,17 @@
+---
+source: crates/djls-template-ast/src/parser.rs
+assertion_line: 440
+expression: ast
+snapshot_kind: text
+---
+nodes:
+  - Variable:
+      bits:
+        - user
+        - name
+      filters: []
+      span:
+        start: 3
+        length: 9
+line_offsets:
+  - 0
New snapshot file (22 lines added), recorded by test_parse_django_variable_with_filter:

@@ -0,0 +1,22 @@
+---
+source: crates/djls-template-ast/src/parser.rs
+assertion_line: 450
+expression: ast
+snapshot_kind: text
+---
+nodes:
+  - Variable:
+      bits:
+        - user
+        - name
+      filters:
+        - name: title
+          args: []
+          span:
+            start: 4
+            length: 15
+      span:
+        start: 3
+        length: 15
+line_offsets:
+  - 0
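Note: for readers of the snapshots, the YAML implies roughly the following data shapes: bits appear to be the variable path split on ".", filters carry a name, args, and their own span, each node has a span of (start, length), and line_offsets records line start positions. These definitions are inferred from the snapshot output only; the real ones live in crate::ast and may differ, and serde with the derive feature is assumed.

// Hypothetical shapes inferred from the snapshot YAML above.
use serde::Serialize;

#[derive(Debug, Serialize)]
struct Span {
    start: u32,
    length: u32,
}

#[derive(Debug, Serialize)]
struct DjangoFilter {
    name: String,
    args: Vec<String>,
    span: Span,
}

#[derive(Debug, Serialize)]
enum Node {
    Variable {
        bits: Vec<String>,
        filters: Vec<DjangoFilter>,
        span: Span,
    },
}

#[derive(Debug, Serialize)]
struct Ast {
    nodes: Vec<Node>,
    line_offsets: Vec<u32>,
}

fn main() {
    // Mirrors the data in the first snapshot ({{ user.name }}).
    let ast = Ast {
        nodes: vec![Node::Variable {
            bits: vec!["user".into(), "name".into()],
            filters: vec![],
            span: Span { start: 3, length: 9 },
        }],
        line_offsets: vec![0],
    };
    println!("{ast:#?}");
}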