Test: update basic syntax tokenizer test

This commit is contained in:
GreasySlug 2022-09-13 17:22:27 +09:00
parent 870f88e14d
commit bb67477e0c

View file

@ -12,68 +12,81 @@ use TokenKind::*;
const FILE1: &str = "tests/test1_basic_syntax.er"; const FILE1: &str = "tests/test1_basic_syntax.er";
#[test] #[test]
fn test_lexer() -> ParseResult<()> { fn test_lexer_for_basic() -> ParseResult<()> {
let mut lexer = Lexer::new(Input::File(FILE1.into())); let mut lexer = Lexer::new(Input::File(FILE1.into()));
let newline = "\n"; let newline = "\n";
let /*mut*/ token_array = vec![ let /*mut*/ token_array = vec![
(Newline, newline),
(Newline, newline),
(Newline, newline),
(Symbol, "_a"), (Symbol, "_a"),
(Equal, "="), (Equal, "="),
(IntLit, "1234"), (NatLit, "1_234"),
(Plus, "+"), (Plus, "+"),
(RatioLit, "1113.0"), (RatioLit, "1113."),
(Plus, "+"), (Star, "*"),
(RatioLit, "0.30102"), (RatioLit, "3_000.2e-4"),
// (Symbol, "a"), (Pow, "**"),
(NatLit, "0003"),
(Star, "*"),
(RatioLit, ".4"),
(Newline, newline), (Newline, newline),
(Symbol, "a"), (Symbol, "a"),
(Comma, ","), (Comma, ","),
(UBar, "_"), (UBar, "_"),
(Comma, ","), (Comma, ","),
(Spread, "..."), (Spread, "..."), // EllipsisLit
(Symbol, "b"), (Symbol, "b"),
(Equal, "="), (Equal, "="),
(Symbol, "five_elem_tuple"), (Symbol, "five_elem_tuple"),
(Newline, newline), (Newline, newline),
(Symbol, "f"),
(Symbol, "x"),
(Comma, ","),
(Symbol, "y"),
(Equal, "="),
(Newline, newline),
(Indent, " "),
(Symbol, "x"),
(Plus, "+"),
(Symbol, "y"),
(Newline, newline),
(Dedent, ""),
(Symbol, "if!"), (Symbol, "if!"),
(Symbol, "True"), (BoolLit, "True"),
(Comma, ","), (Comma, ","),
(Symbol, "do!"), (Symbol, "do!"),
(Newline, newline), (Newline, newline),
(Indent, " "), (Indent, " "),
(Symbol, "print!"), (Symbol, "print!"),
// (LParen, "("), (StrLit, "\"\\\\hello, world\\\"\""),
(StrLit, "\\\\hello, world\\\""),
// (RParen, ")"),
(Newline, newline), (Newline, newline),
(IntLit, "10"), (NatLit, "10"),
(Dot, "."), (Dot, "."),
(Symbol, "times!"), (Symbol, "times!"),
// (LParen, "("),
// (RParen, ")"),
(Symbol, "do!"), (Symbol, "do!"),
(Newline, newline), (Newline, newline),
(Indent, " "), (Indent, " "),
(Symbol, "if!"), (Symbol, "if!"),
(Symbol, "True"), (Symbol, "x"),
(Dot, "."),
(Symbol, "y"),
(Dot, "."),
(Symbol, "z"),
(Comma, ","), (Comma, ","),
(Symbol, "do!"), (Symbol, "do!"),
(Newline, newline), (Newline, newline),
(Indent, " "), (Indent, " "),
(Symbol, "print!"), (Symbol, "print!"),
(StrLit, ""), (StrLit, "\"\""),
(Newline, newline), (Newline, newline),
// (Comment, " illegal indent"),
// (Illegal, "DEDENT"),
// (Symbol, "do_nothing"),
(Dedent, ""), (Dedent, ""),
(Newline, newline), (Newline, newline),
(Newline, newline), (Newline, newline),
(Symbol, "Hello"), (Symbol, "Hello"),
(Equal, "="), (Equal, "="),
(Symbol, "S2c"), (Symbol, "S2c"),
// (LParen, "("), (StrLit, "\"hello\""),
(StrLit, "hello"),
// (RParen, ")"),
(Newline, newline), (Newline, newline),
(Dedent, ""), (Dedent, ""),
(Dedent, ""), (Dedent, ""),
@ -82,7 +95,7 @@ fn test_lexer() -> ParseResult<()> {
(Newline, newline), (Newline, newline),
(Indent, " "), (Indent, " "),
(Newline, newline), (Newline, newline),
(StrLit, "aaa"), (StrLit, "\"aaa\""),
(Newline, newline), (Newline, newline),
(Dedent, ""), (Dedent, ""),
(Symbol, "x"), (Symbol, "x"),
@ -93,12 +106,23 @@ fn test_lexer() -> ParseResult<()> {
(Symbol, "x"), (Symbol, "x"),
(Semi, ";"), (Semi, ";"),
(Newline, newline), (Newline, newline),
(IntLit, "10"), (NatLit, "10"),
(Closed, ".."), (Closed, ".."),
(Symbol, "twelve"), (Symbol, "twelve"),
(Semi, ";"), (Semi, ";"),
(Newline, newline), (Newline, newline),
(EOF, "EOF"), (EOF, ""),
];
let mut tok: Token;
for (id, i) in token_array.into_iter().enumerate() {
tok = lexer.next().unwrap().unwrap();
assert_eq!(tok, Token::from_str(i.0, i.1));
print!("{id:>03}: ");
println!("{tok}");
}
Ok(())
}
]; ];
let mut tok: Token; let mut tok: Token;