test: check valid dedent at the end of block

GreasySlug 2022-12-28 14:53:50 +09:00
parent 41828a6d9a
commit 9297953443
2 changed files with 57 additions and 1 deletion

tests/for.er (new file)

@@ -0,0 +1,5 @@
+for! 0..1, i =>
+    for! 0..1, j =>
+        for! 0..1, k =>
+
+            print! "hi"


@@ -13,6 +13,7 @@ const FILE1: &str = "tests/test1_basic_syntax.er";
 const FILE2: &str = "tests/test2_advanced_syntax.er";
 const FILE3: &str = "tests/test3_literal_syntax.er";
 const FILE4: &str = "tests/multi_line_str_literal.er";
+const FILE5: &str = "tests/for.er";
 
 #[test]
 fn test_lexer_for_basic() -> ParseResult<()> {
@@ -310,7 +311,7 @@ fn test_lexer_for_literals() -> ParseResult<()> {
 fn test_lexer_for_multi_line_str_literal() -> ParseResult<()> {
     let mut lexer = Lexer::new(Input::File(FILE4.into()));
     let newline = "\n";
-    let token_array = vec![
+    let token_array = [
         (Newline, newline),
         (Newline, newline),
         (Symbol, "single_a"),
@@ -401,6 +402,56 @@ line break\"\"\"",
     Ok(())
 }
 
+#[test]
+fn for_loop() -> ParseResult<()> {
+    let mut lexer = Lexer::new(Input::File(FILE5.into()));
+    let newline = "\n";
+    let token_array = [
+        (Symbol, "for!"),
+        (NatLit, "0"),
+        (Closed, ".."),
+        (NatLit, "1"),
+        (Comma, ","),
+        (Symbol, "i"),
+        (ProcArrow, "=>"),
+        (Newline, newline),
+        (Indent, " "),
+        (Symbol, "for!"),
+        (NatLit, "0"),
+        (Closed, ".."),
+        (NatLit, "1"),
+        (Comma, ","),
+        (Symbol, "j"),
+        (ProcArrow, "=>"),
+        (Newline, newline),
+        (Indent, " "),
+        (Symbol, "for!"),
+        (NatLit, "0"),
+        (Closed, ".."),
+        (NatLit, "1"),
+        (Comma, ","),
+        (Symbol, "k"),
+        (ProcArrow, "=>"),
+        (Newline, newline),
+        (Newline, newline),
+        (Indent, " "),
+        (Symbol, "print!"),
+        (StrLit, "\"hi\""),
+        (Dedent, ""),
+        (Dedent, ""),
+        (Dedent, ""),
+        (EOF, ""),
+    ];
+    let mut tok: Token;
+    for (id, i) in token_array.into_iter().enumerate() {
+        print!("{id:>03}: ");
+        tok = lexer.next().unwrap().unwrap();
+        assert_eq!(tok, Token::from_str(i.0, i.1));
+        println!("{tok}");
+    }
+    Ok(())
+}
+
 #[test]
 fn tesop_te_prec() {
     assert_eq!(Mod.precedence(), Some(170));