borrow the string for the Token instead of allocating (#13)

Josh Thomas, 2024-10-14 11:06:49 -05:00, committed by GitHub
parent a34ac87242
commit 3968104da3
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
2 changed files with 10 additions and 10 deletions
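In short: each lexeme is now a borrowed slice of the original source text rather than an owned `String`, so tokenizing no longer heap-allocates per token, at the cost of tying every `Token` to the lifetime of the source. A minimal, standalone sketch of the idea (illustrative names, not this repository's API):

// Minimal, standalone sketch of the idea in this commit (illustrative names,
// not this repository's API): the token borrows its lexeme from the source
// text instead of owning a heap-allocated copy.

#[derive(Debug, Clone)]
struct BorrowedToken<'a> {
    lexeme: &'a str, // zero-copy view into the source (previously a `String`)
    line: usize,
}

fn slice_token(source: &str, start: usize, end: usize, line: usize) -> BorrowedToken<'_> {
    BorrowedToken {
        // No allocation here: just a pointer + length into `source`.
        lexeme: &source[start..end],
        line,
    }
}

fn main() {
    let source = String::from("let answer = 42;");
    let tok = slice_token(&source, 0, 3, 1);
    assert_eq!(tok.lexeme, "let");
    // Because `tok` borrows from `source`, the source string must outlive
    // every token produced from it; dropping `source` before using `tok`
    // would be rejected by the borrow checker.
    println!("{:?}", tok);
}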


@@ -4,7 +4,7 @@ use crate::token::{Token, TokenType, Tokenizer};
 pub struct Lexer<'a> {
     source: &'a str,
-    tokens: Vec<Token>,
+    tokens: Vec<Token<'a>>,
     state: LexerState,
 }
@@ -17,7 +17,7 @@ impl<'a> Lexer<'a> {
         }
     }

-    fn scan_token(&mut self) -> Result<(), LexerError> {
+    fn _scan_token(&mut self) -> Result<(), LexerError> {
         let c = self.advance();

         let token_type = match c {
@@ -211,7 +211,7 @@ impl<'a> Lexer<'a> {
 }

 impl<'a> Tokenizer for Lexer<'a> {
-    type Token = Token;
+    type Token = Token<'a>;
     type TokenType = TokenType;
     type Error = LexerError;
@@ -222,16 +222,16 @@ impl<'a> Tokenizer for Lexer<'a> {
         }
         self.tokens
-            .push(Token::new(TokenType::Eof, String::new(), self.state.line));
+            .push(Token::new(TokenType::Eof, "", self.state.line));
         Ok(self.tokens.clone())
     }

     fn scan_token(&mut self) -> Result<(), LexerError> {
-        self.scan_token()
+        self._scan_token()
     }

     fn add_token(&mut self, token_type: Self::TokenType) {
-        let text = self.source[self.state.start..self.state.current].to_string();
+        let text = &self.source[self.state.start..self.state.current];
         if token_type != TokenType::Whitespace {
             self.tokens
                 .push(Token::new(token_type, text, self.state.line));
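One detail worth noting in `add_token`: because `source` is held by the lexer as `&'a str`, slicing it with `&self.source[self.state.start..self.state.current]` yields a `&'a str` tied to the source text itself, not to the short-lived `&mut self` borrow, which is what allows the slice to be stored in `self.tokens: Vec<Token<'a>>`. A reduced sketch of that pattern (hypothetical names, assuming nothing beyond what the diff shows):

// Reduced sketch of the borrowing pattern used by `add_token` above
// (hypothetical names): slicing a `&'a str` field produces a `&'a str`,
// so the result can outlive the `&mut self` borrow taken by the method.

struct MiniLexer<'a> {
    source: &'a str,
    pieces: Vec<&'a str>,
}

impl<'a> MiniLexer<'a> {
    fn push_slice(&mut self, start: usize, end: usize) {
        // The slice reborrows through the copied-out `&'a str`, so it has
        // lifetime 'a and may be stored in a field of the lexer itself.
        let piece: &'a str = &self.source[start..end];
        self.pieces.push(piece);
    }
}

fn main() {
    let text = "a + b";
    let mut lexer = MiniLexer { source: text, pieces: Vec::new() };
    lexer.push_slice(0, 1);
    lexer.push_slice(4, 5);
    assert_eq!(lexer.pieces, ["a", "b"]);
}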


@@ -42,14 +42,14 @@ pub enum TokenType {
 }

 #[derive(Debug, Clone)]
-pub struct Token {
+pub struct Token<'a> {
     pub token_type: TokenType,
-    pub lexeme: String,
+    pub lexeme: &'a str,
     pub line: usize,
 }

-impl Token {
-    pub fn new(token_type: TokenType, lexeme: String, line: usize) -> Self {
+impl<'a> Token<'a> {
+    pub fn new(token_type: TokenType, lexeme: &'a str, line: usize) -> Self {
         Token {
             token_type,
             lexeme,
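Design note on the `Token<'a>` change: with `lexeme` as a `&'a str`, the derived `Clone` copies only a pointer-and-length pair instead of duplicating the lexeme's bytes, and the lifetime parameter propagates to everything that holds tokens, which is why the lexer above switches `Vec<Token>` to `Vec<Token<'a>>` and `type Token = Token<'a>`. The trade-off is that tokens can no longer outlive the source string they were scanned from.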