Mirror of https://github.com/joshuadavidthomas/django-template-ast.git (synced 2025-08-04 08:58:17 +00:00)
borrow the string for the Token instead of allocating (#13)

commit 3968104da3 (parent a34ac87242)
2 changed files with 10 additions and 10 deletions
 src/lexer.rs | 12 ++++++------
 src/token.rs |  8 ++++----
src/lexer.rs
@@ -4,7 +4,7 @@ use crate::token::{Token, TokenType, Tokenizer};
 pub struct Lexer<'a> {
     source: &'a str,
-    tokens: Vec<Token>,
+    tokens: Vec<Token<'a>>,
     state: LexerState,
 }
 
 
@@ -17,7 +17,7 @@ impl<'a> Lexer<'a> {
     }
 
-    fn scan_token(&mut self) -> Result<(), LexerError> {
+    fn _scan_token(&mut self) -> Result<(), LexerError> {
         let c = self.advance();
 
         let token_type = match c {
@@ -211,7 +211,7 @@ impl<'a> Lexer<'a> {
 }
 
 impl<'a> Tokenizer for Lexer<'a> {
-    type Token = Token;
+    type Token = Token<'a>;
     type TokenType = TokenType;
     type Error = LexerError;
 
@@ -222,16 +222,16 @@ impl<'a> Tokenizer for Lexer<'a> {
         }
 
         self.tokens
-            .push(Token::new(TokenType::Eof, String::new(), self.state.line));
+            .push(Token::new(TokenType::Eof, "", self.state.line));
         Ok(self.tokens.clone())
     }
 
     fn scan_token(&mut self) -> Result<(), LexerError> {
-        self.scan_token()
+        self._scan_token()
     }
 
     fn add_token(&mut self, token_type: Self::TokenType) {
-        let text = self.source[self.state.start..self.state.current].to_string();
+        let text = &self.source[self.state.start..self.state.current];
         if token_type != TokenType::Whitespace {
             self.tokens
                 .push(Token::new(token_type, text, self.state.line));
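
The lexer-side change is in add_token: instead of copying the matched text into a fresh String, it now slices the borrowed source, so each lexeme is a &str pointing back into the input. A minimal standalone sketch of that pattern (hypothetical, simplified names, not the crate's actual Lexer), assuming the scanner tracks a start..current byte range:

// Sketch: slicing a borrowed source ties each lexeme's lifetime to the input.
// Because `source` is `&'a str`, `&source[start..current]` is also `&'a str`
// and can be stored in a token without allocating.
struct ScanState<'a> {
    source: &'a str,
    start: usize,
    current: usize,
}

impl<'a> ScanState<'a> {
    // Before the commit: self.source[start..current].to_string() allocated per token.
    // After: a zero-copy slice borrowed from the source.
    fn current_lexeme(&self) -> &'a str {
        &self.source[self.start..self.current]
    }
}

fn main() {
    let source = "{{ user.name }}";
    let state = ScanState { source, start: 3, current: 12 };
    assert_eq!(state.current_lexeme(), "user.name");
}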

src/token.rs
@@ -42,14 +42,14 @@ pub enum TokenType {
 }
 
 #[derive(Debug, Clone)]
-pub struct Token {
+pub struct Token<'a> {
     pub token_type: TokenType,
-    pub lexeme: String,
+    pub lexeme: &'a str,
     pub line: usize,
 }
 
-impl Token {
-    pub fn new(token_type: TokenType, lexeme: String, line: usize) -> Self {
+impl<'a> Token<'a> {
+    pub fn new(token_type: TokenType, lexeme: &'a str, line: usize) -> Self {
         Token {
             token_type,
             lexeme,
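
Taken together, the commit threads a lifetime from the source text through Token<'a> and Lexer<'a>: tokens no longer own heap-allocated strings, they borrow slices of the template being lexed, and the Eof token uses the literal "" instead of String::new(). A self-contained sketch of the resulting shape (simplified TokenType and a stand-in tokenize function, not the crate's real API):

// Sketch of the borrowed-token pattern (TokenType reduced to two variants,
// scanning replaced by a whitespace split). The key point: lexeme is &'a str,
// so building the token vector performs no per-token string allocation.
#[derive(Debug, Clone, PartialEq)]
enum TokenType {
    Text,
    Eof,
}

#[derive(Debug, Clone)]
struct Token<'a> {
    token_type: TokenType,
    lexeme: &'a str,
    line: usize,
}

impl<'a> Token<'a> {
    fn new(token_type: TokenType, lexeme: &'a str, line: usize) -> Self {
        Token { token_type, lexeme, line }
    }
}

// Stand-in for the real lexer: every whitespace-separated word becomes a token.
fn tokenize(source: &str) -> Vec<Token<'_>> {
    let mut tokens: Vec<Token<'_>> = source
        .split_whitespace()
        .map(|word| Token::new(TokenType::Text, word, 1))
        .collect();
    // Mirrors the commit: the Eof lexeme is the empty string literal "",
    // not an allocated String::new().
    tokens.push(Token::new(TokenType::Eof, "", 1));
    tokens
}

fn main() {
    let source = String::from("{% block content %}");
    let tokens = tokenize(&source);
    assert_eq!(tokens.last().unwrap().token_type, TokenType::Eof);
    // Each lexeme borrows from `source`; dropping `source` before the tokens
    // would be rejected by the borrow checker.
    println!("{:?}", tokens);
}

The trade-off is the usual one for borrowed data: cloning a Token now copies only a pointer, a length, and a line number, but no token can outlive the source string it was cut from.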