Mirror of https://github.com/erg-lang/erg.git, synced 2025-10-01 05:11:09 +00:00
Test: update basic syntax tokenize test
parent 870f88e14d
commit bb67477e0c
1 changed file with 49 additions and 25 deletions
@@ -12,68 +12,81 @@ use TokenKind::*;
 const FILE1: &str = "tests/test1_basic_syntax.er";

 #[test]
-fn test_lexer() -> ParseResult<()> {
+fn test_lexer_for_basic() -> ParseResult<()> {
     let mut lexer = Lexer::new(Input::File(FILE1.into()));
     let newline = "\n";
     let /*mut*/ token_array = vec![
         (Newline, newline),
         (Newline, newline),
         (Newline, newline),
         (Symbol, "_a"),
         (Equal, "="),
-        (IntLit, "1234"),
+        (NatLit, "1_234"),
         (Plus, "+"),
-        (RatioLit, "1113.0"),
-        (Plus, "+"),
-        (RatioLit, "0.30102"),
-        // (Symbol, "a"),
+        (RatioLit, "1113."),
         (Star, "*"),
         (RatioLit, "3_000.2e-4"),
         (Pow, "**"),
         (NatLit, "0003"),
         (Star, "*"),
         (RatioLit, ".4"),
         (Newline, newline),
         (Symbol, "a"),
         (Comma, ","),
         (UBar, "_"),
         (Comma, ","),
-        (Spread, "..."),
+        (Spread, "..."), // EllipsisLit
         (Symbol, "b"),
         (Equal, "="),
         (Symbol, "five_elem_tuple"),
         (Newline, newline),
         (Symbol, "f"),
         (Symbol, "x"),
         (Comma, ","),
         (Symbol, "y"),
         (Equal, "="),
         (Newline, newline),
         (Indent, "    "),
         (Symbol, "x"),
         (Plus, "+"),
         (Symbol, "y"),
         (Newline, newline),
         (Dedent, ""),
         (Symbol, "if!"),
-        (Symbol, "True"),
+        (BoolLit, "True"),
         (Comma, ","),
         (Symbol, "do!"),
         (Newline, newline),
         (Indent, "    "),
         (Symbol, "print!"),
-        // (LParen, "("),
-        (StrLit, "\\\\hello, world\\\""),
-        // (RParen, ")"),
+        (StrLit, "\"\\\\hello, world\\\"\""),
         (Newline, newline),
-        (IntLit, "10"),
+        (NatLit, "10"),
         (Dot, "."),
         (Symbol, "times!"),
-        // (LParen, "("),
-        // (RParen, ")"),
         (Symbol, "do!"),
         (Newline, newline),
         (Indent, "    "),
         (Symbol, "if!"),
         (Symbol, "True"),
         (Symbol, "x"),
         (Dot, "."),
         (Symbol, "y"),
         (Dot, "."),
         (Symbol, "z"),
         (Comma, ","),
         (Symbol, "do!"),
         (Newline, newline),
         (Indent, "    "),
         (Symbol, "print!"),
-        (StrLit, ""),
+        (StrLit, "\"\""),
         (Newline, newline),
-        // (Comment, " illegal indent"),
-        // (Illegal, "DEDENT"),
-        // (Symbol, "do_nothing"),
         (Dedent, ""),
         (Newline, newline),
         (Newline, newline),
         (Symbol, "Hello"),
         (Equal, "="),
         (Symbol, "S2c"),
-        // (LParen, "("),
-        (StrLit, "hello"),
-        // (RParen, ")"),
+        (StrLit, "\"hello\""),
         (Newline, newline),
         (Dedent, ""),
         (Dedent, ""),
@@ -82,7 +95,7 @@ fn test_lexer() -> ParseResult<()> {
         (Newline, newline),
         (Indent, "    "),
         (Newline, newline),
-        (StrLit, "aaa"),
+        (StrLit, "\"aaa\""),
         (Newline, newline),
         (Dedent, ""),
         (Symbol, "x"),
@@ -93,12 +106,23 @@ fn test_lexer() -> ParseResult<()> {
         (Symbol, "x"),
         (Semi, ";"),
         (Newline, newline),
-        (IntLit, "10"),
+        (NatLit, "10"),
         (Closed, ".."),
         (Symbol, "twelve"),
         (Semi, ";"),
         (Newline, newline),
-        (EOF, "EOF"),
+        (EOF, ""),
     ];
     let mut tok: Token;
     for (id, i) in token_array.into_iter().enumerate() {
         tok = lexer.next().unwrap().unwrap();
         assert_eq!(tok, Token::from_str(i.0, i.1));
         print!("{id:>03}: ");
         println!("{tok}");
     }
     Ok(())
 }
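As a usage note: the loop at the end of the diff compares each lexed token against an expected (TokenKind, &str) pair and prints its index. The sketch below reproduces that checking pattern in isolation so it can be compiled and run on its own; TokenKind, Token, and ToyLexer are simplified stand-ins invented for the example and are not the actual erg_parser types.

// Minimal, self-contained sketch of the expected-token check, under the
// assumption stated above: all types here are stand-ins, not erg_parser's.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum TokenKind {
    Symbol,
    Equal,
    NatLit,
    Newline,
    EOF,
}

#[derive(Debug, Clone, PartialEq, Eq)]
struct Token {
    kind: TokenKind,
    content: String,
}

impl Token {
    fn from_str(kind: TokenKind, content: &str) -> Self {
        Self {
            kind,
            content: content.to_string(),
        }
    }
}

// Toy "lexer" that simply replays a fixed token stream.
struct ToyLexer {
    tokens: std::vec::IntoIter<Token>,
}

impl Iterator for ToyLexer {
    type Item = Token;
    fn next(&mut self) -> Option<Token> {
        self.tokens.next()
    }
}

fn main() {
    use TokenKind::*;

    // Stream the toy lexer will produce (stands in for tokenizing the .er file).
    let mut lexer = ToyLexer {
        tokens: vec![
            Token::from_str(Symbol, "_a"),
            Token::from_str(Equal, "="),
            Token::from_str(NatLit, "1_234"),
            Token::from_str(Newline, "\n"),
            Token::from_str(EOF, ""),
        ]
        .into_iter(),
    };

    // Expected (kind, text) pairs, in the same style as the test's token_array.
    let token_array = vec![
        (Symbol, "_a"),
        (Equal, "="),
        (NatLit, "1_234"),
        (Newline, "\n"),
        (EOF, ""),
    ];

    for (id, expected) in token_array.into_iter().enumerate() {
        let tok = lexer.next().unwrap();
        assert_eq!(tok, Token::from_str(expected.0, expected.1));
        // The zero-padded index makes it easy to match a failure to the list.
        print!("{id:>03}: ");
        println!("{tok:?}");
    }
}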