wait, idiomatic Rust is Pascal for Enums? booooring

Josh Thomas 2024-10-12 15:54:33 -05:00
parent d117b3148d
commit ce8c732d80


@@ -2,41 +2,47 @@ use std::fmt;
 
 #[derive(Debug, Clone, PartialEq)]
 enum TokenType {
-    LEFT_PAREN, // (
-    RIGHT_PAREN, // )
-    LEFT_BRACE, // {
-    RIGHT_BRACE, // }
-    LEFT_BRACKET, // [
-    RIGHT_BRACKET, // ]
-    LEFT_ANGLE, // <
-    RIGHT_ANGLE, // >
-    COMMA, // ,
-    DOT, // .
-    MINUS, // -
-    PLUS, // +
-    COLON, // :
-    SEMICOLON, // ;
-    SLASH, // /
-    STAR, // *
-    BANG, // !
-    EQUAL, // =
-    PIPE, // |
-    PERCENT, // %
-    HASH, // #
-    SINGLE_QUOTE, // '
-    DOUBLE_QUOTE, // "
-    DOUBLE_LEFT_BRACE, // {{
-    DOUBLE_RIGHT_BRACE, // }}
-    LEFT_BRACE_PERCENT, // {%
-    PERCENT_RIGHT_BRACE, // %}
-    LEFT_BRACE_HASH, // {#
-    HASH_RIGHT_BRACE, // #}
-    BANG_EQUAL, // !=
-    DOUBLE_EQUAL, // ==
-    LEFT_ANGLE_EQUAL, // <=
-    RIGHT_ANGLE_EQUAL, // >=
-    TEXT,
-    EOF,
+    LeftParen, // (
+    RightParen, // )
+    LeftBrace, // {
+    RightBrace, // }
+    LeftBracket, // [
+    RightBracket, // ]
+    LeftAngle, // <
+    RightAngle, // >
+    Comma, // ,
+    Dot, // .
+    Minus, // -
+    Plus, // +
+    Colon, // :
+    Semicolon, // ;
+    Slash, // /
+    Star, // *
+    Bang, // !
+    Equal, // =
+    Pipe, // |
+    Percent, // %
+    Hash, // #
+    SingleQuote, // '
+    DoubleQuote, // "
+    DoubleLeftBrace, // {{
+    DoubleRightBrace, // }}
+    LeftBracePercent, // {%
+    PercentRightBrace, // %}
+    LeftBraceHash, // {#
+    HashRightBrace, // #}
+    BangEqual, // !=
+    DoubleEqual, // ==
+    LeftAngleEqual, // <=
+    RightAngleEqual, // >=
+    Text,
+    Eof,
 }
 
+impl Default for TokenType {
+    fn default() -> Self {
+        Self::Comma
+    }
+}
+
 #[derive(Debug, Clone)]
@@ -99,7 +105,7 @@ impl Lexer {
         }
 
         self.tokens
-            .push(Token::new(TokenType::EOF, String::new(), None, self.line));
+            .push(Token::new(TokenType::Eof, String::new(), None, self.line));
         self.tokens.clone()
     }
@@ -107,85 +113,85 @@ impl Lexer {
         use TokenType::*;
         let c = self.advance();
         match c {
-            '(' => self.add_token(LEFT_PAREN),
-            ')' => self.add_token(RIGHT_PAREN),
-            '[' => self.add_token(LEFT_BRACKET),
-            ']' => self.add_token(RIGHT_BRACKET),
-            ',' => self.add_token(COMMA),
-            '.' => self.add_token(DOT),
-            '-' => self.add_token(MINUS),
-            '+' => self.add_token(PLUS),
-            ':' => self.add_token(COLON),
-            ';' => self.add_token(SEMICOLON),
-            '*' => self.add_token(STAR),
-            '|' => self.add_token(PIPE),
-            '\'' => self.add_token(SINGLE_QUOTE),
-            '"' => self.add_token(DOUBLE_QUOTE),
+            '(' => self.add_token(LeftParen),
+            ')' => self.add_token(RightParen),
+            '[' => self.add_token(LeftBracket),
+            ']' => self.add_token(RightBracket),
+            ',' => self.add_token(Comma),
+            '.' => self.add_token(Dot),
+            '-' => self.add_token(Minus),
+            '+' => self.add_token(Plus),
+            ':' => self.add_token(Colon),
+            ';' => self.add_token(Semicolon),
+            '*' => self.add_token(Star),
+            '|' => self.add_token(Pipe),
+            '\'' => self.add_token(SingleQuote),
+            '"' => self.add_token(DoubleQuote),
             '{' => {
                 let token_type = if self.match_char('{') {
-                    DOUBLE_LEFT_BRACE
+                    DoubleLeftBrace
                 } else if self.match_char('%') {
-                    LEFT_BRACE_PERCENT
+                    LeftBracePercent
                 } else if self.match_char('#') {
-                    LEFT_BRACE_HASH
+                    LeftBraceHash
                 } else {
-                    LEFT_BRACE
+                    LeftBrace
                 };
                 self.add_token(token_type);
             }
             '}' => {
                 let token_type = if self.match_char('}') {
-                    DOUBLE_RIGHT_BRACE
+                    DoubleRightBrace
                 } else {
-                    RIGHT_BRACE
+                    RightBrace
                 };
                 self.add_token(token_type);
             }
             '%' => {
                 let token_type = if self.match_char('}') {
-                    PERCENT_RIGHT_BRACE
+                    PercentRightBrace
                 } else {
-                    PERCENT
+                    Percent
                 };
                 self.add_token(token_type);
             }
             '#' => {
                 let token_type = if self.match_char('}') {
-                    HASH_RIGHT_BRACE
+                    HashRightBrace
                 } else {
-                    HASH
+                    Hash
                 };
                 self.add_token(token_type);
             }
             '!' => {
                 let token_type = if self.match_char('=') {
-                    BANG_EQUAL
+                    BangEqual
                 } else {
-                    BANG
+                    Bang
                 };
                 self.add_token(token_type);
             }
             '=' => {
                 let token_type = if self.match_char('=') {
-                    DOUBLE_EQUAL
+                    DoubleEqual
                 } else {
-                    EQUAL
+                    Equal
                 };
                 self.add_token(token_type);
             }
             '<' => {
                 let token_type = if self.match_char('=') {
-                    LEFT_ANGLE_EQUAL
+                    LeftAngleEqual
                 } else {
-                    LEFT_ANGLE
+                    LeftAngle
                 };
                 self.add_token(token_type);
             }
             '>' => {
                 let token_type = if self.match_char('=') {
-                    RIGHT_ANGLE_EQUAL
+                    RightAngleEqual
                 } else {
-                    RIGHT_ANGLE
+                    RightAngle
                 };
                 self.add_token(token_type);
             }
@@ -195,7 +201,7 @@ impl Lexer {
                         self.advance();
                     }
                 } else {
-                    self.add_token(SLASH);
+                    self.add_token(Slash);
                 }
             }
             ' ' | '\r' | '\t' => {}
@@ -214,7 +220,7 @@ impl Lexer {
         let text = self.source[self.start..self.current].to_string();
         if !text.is_empty() {
-            self.add_token_literal(TokenType::TEXT, Some(text));
+            self.add_token_literal(TokenType::Text, Some(text));
         }
     }
@@ -312,105 +318,105 @@ mod tests {
     #[test]
     fn test_opening_tag() {
         let tokens = tokenize("<html>");
-        assert_eq!(tokens[0].token_type, LEFT_ANGLE);
-        assert_eq!(tokens[1].token_type, TEXT);
+        assert_eq!(tokens[0].token_type, LeftAngle);
+        assert_eq!(tokens[1].token_type, Text);
         assert_eq!(tokens[1].lexeme, "html");
-        assert_eq!(tokens[2].token_type, RIGHT_ANGLE);
+        assert_eq!(tokens[2].token_type, RightAngle);
     }
 
     #[test]
     fn test_closing_tag() {
         let tokens = tokenize("</body>");
-        assert_eq!(tokens[0].token_type, LEFT_ANGLE);
-        assert_eq!(tokens[1].token_type, SLASH);
-        assert_eq!(tokens[2].token_type, TEXT);
+        assert_eq!(tokens[0].token_type, LeftAngle);
+        assert_eq!(tokens[1].token_type, Slash);
+        assert_eq!(tokens[2].token_type, Text);
         assert_eq!(tokens[2].lexeme, "body");
-        assert_eq!(tokens[3].token_type, RIGHT_ANGLE);
+        assert_eq!(tokens[3].token_type, RightAngle);
     }
 
     #[test]
     fn test_html_attribute() {
         let tokens = tokenize(r#"<a href="link">"#);
-        assert_eq!(tokens[0].token_type, LEFT_ANGLE);
-        assert_eq!(tokens[1].token_type, TEXT);
+        assert_eq!(tokens[0].token_type, LeftAngle);
+        assert_eq!(tokens[1].token_type, Text);
         assert_eq!(tokens[1].lexeme, "a");
-        assert_eq!(tokens[2].token_type, TEXT);
+        assert_eq!(tokens[2].token_type, Text);
         assert_eq!(tokens[2].lexeme, "href");
-        assert_eq!(tokens[3].token_type, EQUAL);
-        assert_eq!(tokens[4].token_type, DOUBLE_QUOTE);
-        assert_eq!(tokens[5].token_type, TEXT);
+        assert_eq!(tokens[3].token_type, Equal);
+        assert_eq!(tokens[4].token_type, DoubleQuote);
+        assert_eq!(tokens[5].token_type, Text);
         assert_eq!(tokens[5].lexeme, "link");
-        assert_eq!(tokens[6].token_type, DOUBLE_QUOTE);
-        assert_eq!(tokens[7].token_type, RIGHT_ANGLE);
+        assert_eq!(tokens[6].token_type, DoubleQuote);
+        assert_eq!(tokens[7].token_type, RightAngle);
     }
 
     #[test]
     fn test_django_variable() {
        let tokens = tokenize("{{ variable }}");
-        assert_eq!(tokens[0].token_type, DOUBLE_LEFT_BRACE);
-        assert_eq!(tokens[1].token_type, TEXT);
+        assert_eq!(tokens[0].token_type, DoubleLeftBrace);
+        assert_eq!(tokens[1].token_type, Text);
         assert_eq!(tokens[1].lexeme, "variable");
-        assert_eq!(tokens[2].token_type, DOUBLE_RIGHT_BRACE);
+        assert_eq!(tokens[2].token_type, DoubleRightBrace);
     }
 
     #[test]
     fn test_django_templatetag() {
         let tokens = tokenize("{% if condition %}");
-        assert_eq!(tokens[0].token_type, LEFT_BRACE_PERCENT);
-        assert_eq!(tokens[1].token_type, TEXT);
+        assert_eq!(tokens[0].token_type, LeftBracePercent);
+        assert_eq!(tokens[1].token_type, Text);
         assert_eq!(tokens[1].lexeme, "if");
-        assert_eq!(tokens[2].token_type, TEXT);
+        assert_eq!(tokens[2].token_type, Text);
         assert_eq!(tokens[2].lexeme, "condition");
-        assert_eq!(tokens[3].token_type, PERCENT_RIGHT_BRACE);
+        assert_eq!(tokens[3].token_type, PercentRightBrace);
     }
 
     #[test]
     fn test_django_comment() {
         let tokens = tokenize("{# This is a comment #}");
-        assert_eq!(tokens[0].token_type, LEFT_BRACE_HASH);
-        assert_eq!(tokens[1].token_type, TEXT);
+        assert_eq!(tokens[0].token_type, LeftBraceHash);
+        assert_eq!(tokens[1].token_type, Text);
         assert_eq!(tokens[1].lexeme, "This");
-        assert_eq!(tokens[2].token_type, TEXT);
+        assert_eq!(tokens[2].token_type, Text);
         assert_eq!(tokens[2].lexeme, "is");
-        assert_eq!(tokens[3].token_type, TEXT);
+        assert_eq!(tokens[3].token_type, Text);
         assert_eq!(tokens[3].lexeme, "a");
-        assert_eq!(tokens[4].token_type, TEXT);
+        assert_eq!(tokens[4].token_type, Text);
         assert_eq!(tokens[4].lexeme, "comment");
-        assert_eq!(tokens[5].token_type, HASH_RIGHT_BRACE);
+        assert_eq!(tokens[5].token_type, HashRightBrace);
     }
 
     #[test]
     fn test_django_filter() {
         let tokens = tokenize("{{ value|default:'default' }}");
-        assert_eq!(tokens[0].token_type, DOUBLE_LEFT_BRACE);
-        assert_eq!(tokens[1].token_type, TEXT);
+        assert_eq!(tokens[0].token_type, DoubleLeftBrace);
+        assert_eq!(tokens[1].token_type, Text);
         assert_eq!(tokens[1].lexeme, "value");
-        assert_eq!(tokens[2].token_type, PIPE);
-        assert_eq!(tokens[3].token_type, TEXT);
+        assert_eq!(tokens[2].token_type, Pipe);
+        assert_eq!(tokens[3].token_type, Text);
         assert_eq!(tokens[3].lexeme, "default");
-        assert_eq!(tokens[4].token_type, COLON);
-        assert_eq!(tokens[5].token_type, SINGLE_QUOTE);
-        assert_eq!(tokens[6].token_type, TEXT);
+        assert_eq!(tokens[4].token_type, Colon);
+        assert_eq!(tokens[5].token_type, SingleQuote);
+        assert_eq!(tokens[6].token_type, Text);
         assert_eq!(tokens[6].lexeme, "default");
-        assert_eq!(tokens[7].token_type, SINGLE_QUOTE);
-        assert_eq!(tokens[8].token_type, DOUBLE_RIGHT_BRACE);
+        assert_eq!(tokens[7].token_type, SingleQuote);
+        assert_eq!(tokens[8].token_type, DoubleRightBrace);
     }
 
     #[test]
     fn test_quoted_django_templatetag() {
         let tokens = tokenize(r#"'{% url "api:index" %}'"#);
-        assert_eq!(tokens[0].token_type, SINGLE_QUOTE);
-        assert_eq!(tokens[1].token_type, LEFT_BRACE_PERCENT);
-        assert_eq!(tokens[2].token_type, TEXT);
+        assert_eq!(tokens[0].token_type, SingleQuote);
+        assert_eq!(tokens[1].token_type, LeftBracePercent);
+        assert_eq!(tokens[2].token_type, Text);
         assert_eq!(tokens[2].lexeme, "url");
-        assert_eq!(tokens[3].token_type, DOUBLE_QUOTE);
-        assert_eq!(tokens[4].token_type, TEXT);
+        assert_eq!(tokens[3].token_type, DoubleQuote);
+        assert_eq!(tokens[4].token_type, Text);
         assert_eq!(tokens[4].lexeme, "api");
-        assert_eq!(tokens[5].token_type, COLON);
-        assert_eq!(tokens[6].token_type, TEXT);
+        assert_eq!(tokens[5].token_type, Colon);
+        assert_eq!(tokens[6].token_type, Text);
         assert_eq!(tokens[6].lexeme, "index");
-        assert_eq!(tokens[7].token_type, DOUBLE_QUOTE);
-        assert_eq!(tokens[8].token_type, PERCENT_RIGHT_BRACE);
-        assert_eq!(tokens[9].token_type, SINGLE_QUOTE);
+        assert_eq!(tokens[7].token_type, DoubleQuote);
+        assert_eq!(tokens[8].token_type, PercentRightBrace);
+        assert_eq!(tokens[9].token_type, SingleQuote);
     }
 }
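
Why the rename: rustc ships a warn-by-default `non_camel_case_types` lint that covers enum variants, so every SCREAMING_SNAKE_CASE variant above produced a "should have an upper camel case name" warning on each build. A minimal standalone sketch of the before/after (the `Paren` enums below are hypothetical, not from this repo):

#[derive(Debug, PartialEq)]
enum Paren {
    LEFT_PAREN,  // rustc warns: variant `LEFT_PAREN` should have an upper camel case name
    RIGHT_PAREN, // same warning; `#[allow(non_camel_case_types)]` on the enum would silence it
}

#[derive(Debug, PartialEq)]
enum ParenIdiomatic {
    LeftParen, // upper camel case: no lint fires
    RightParen,
}

fn main() {
    // Both styles compile and behave identically; the difference is purely lint noise.
    assert_eq!(Paren::LEFT_PAREN, Paren::LEFT_PAREN);
    assert_eq!(ParenIdiomatic::LeftParen, ParenIdiomatic::LeftParen);
}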