From bb67477e0cb07af6bb1414620b9caf03e85a78e6 Mon Sep 17 00:00:00 2001 From: GreasySlug <9619abgoni@gmail.com> Date: Tue, 13 Sep 2022 17:22:27 +0900 Subject: [PATCH 01/11] Test: update basic syntax tokenize test --- compiler/erg_parser/tests/test.rs | 74 ++++++++++++++++++++----------- 1 file changed, 49 insertions(+), 25 deletions(-) diff --git a/compiler/erg_parser/tests/test.rs b/compiler/erg_parser/tests/test.rs index 5e0d1320..73021af6 100644 --- a/compiler/erg_parser/tests/test.rs +++ b/compiler/erg_parser/tests/test.rs @@ -12,68 +12,81 @@ use TokenKind::*; const FILE1: &str = "tests/test1_basic_syntax.er"; #[test] -fn test_lexer() -> ParseResult<()> { +fn test_lexer_for_basic() -> ParseResult<()> { let mut lexer = Lexer::new(Input::File(FILE1.into())); let newline = "\n"; let /*mut*/ token_array = vec![ + (Newline, newline), + (Newline, newline), + (Newline, newline), (Symbol, "_a"), (Equal, "="), - (IntLit, "1234"), + (NatLit, "1_234"), (Plus, "+"), - (RatioLit, "1113.0"), - (Plus, "+"), - (RatioLit, "0.30102"), - // (Symbol, "a"), + (RatioLit, "1113."), + (Star, "*"), + (RatioLit, "3_000.2e-4"), + (Pow, "**"), + (NatLit, "0003"), + (Star, "*"), + (RatioLit, ".4"), (Newline, newline), (Symbol, "a"), (Comma, ","), (UBar, "_"), (Comma, ","), - (Spread, "..."), + (Spread, "..."), // EllipsisLit (Symbol, "b"), (Equal, "="), (Symbol, "five_elem_tuple"), (Newline, newline), + (Symbol, "f"), + (Symbol, "x"), + (Comma, ","), + (Symbol, "y"), + (Equal, "="), + (Newline, newline), + (Indent, " "), + (Symbol, "x"), + (Plus, "+"), + (Symbol, "y"), + (Newline, newline), + (Dedent, ""), (Symbol, "if!"), - (Symbol, "True"), + (BoolLit, "True"), (Comma, ","), (Symbol, "do!"), (Newline, newline), (Indent, " "), (Symbol, "print!"), - // (LParen, "("), - (StrLit, "\\\\hello, world\\\""), - // (RParen, ")"), + (StrLit, "\"\\\\hello, world\\\"\""), (Newline, newline), - (IntLit, "10"), + (NatLit, "10"), (Dot, "."), (Symbol, "times!"), - // (LParen, "("), - // (RParen, ")"), (Symbol, "do!"), (Newline, newline), (Indent, " "), (Symbol, "if!"), - (Symbol, "True"), + (Symbol, "x"), + (Dot, "."), + (Symbol, "y"), + (Dot, "."), + (Symbol, "z"), (Comma, ","), (Symbol, "do!"), (Newline, newline), (Indent, " "), (Symbol, "print!"), - (StrLit, ""), + (StrLit, "\"\""), (Newline, newline), - // (Comment, " illegal indent"), - // (Illegal, "DEDENT"), - // (Symbol, "do_nothing"), (Dedent, ""), (Newline, newline), (Newline, newline), (Symbol, "Hello"), (Equal, "="), (Symbol, "S2c"), - // (LParen, "("), - (StrLit, "hello"), - // (RParen, ")"), + (StrLit, "\"hello\""), (Newline, newline), (Dedent, ""), (Dedent, ""), @@ -82,7 +95,7 @@ fn test_lexer() -> ParseResult<()> { (Newline, newline), (Indent, " "), (Newline, newline), - (StrLit, "aaa"), + (StrLit, "\"aaa\""), (Newline, newline), (Dedent, ""), (Symbol, "x"), @@ -93,12 +106,23 @@ fn test_lexer() -> ParseResult<()> { (Symbol, "x"), (Semi, ";"), (Newline, newline), - (IntLit, "10"), + (NatLit, "10"), (Closed, ".."), (Symbol, "twelve"), (Semi, ";"), (Newline, newline), - (EOF, "EOF"), + (EOF, ""), + ]; + let mut tok: Token; + for (id, i) in token_array.into_iter().enumerate() { + tok = lexer.next().unwrap().unwrap(); + assert_eq!(tok, Token::from_str(i.0, i.1)); + print!("{id:>03}: "); + println!("{tok}"); + } + Ok(()) +} + ]; let mut tok: Token; From 4ea00a54cd77e50330b5c324c3b399f50119b08b Mon Sep 17 00:00:00 2001 From: GreasySlug <9619abgoni@gmail.com> Date: Tue, 13 Sep 2022 17:24:00 +0900 Subject: [PATCH 02/11] Test: add advanced syntax tokenize test --- 
compiler/erg_parser/tests/test.rs | 151 +++++++++++++++++++++++++++++- 1 file changed, 150 insertions(+), 1 deletion(-) diff --git a/compiler/erg_parser/tests/test.rs b/compiler/erg_parser/tests/test.rs index 73021af6..6309abd7 100644 --- a/compiler/erg_parser/tests/test.rs +++ b/compiler/erg_parser/tests/test.rs @@ -123,10 +123,159 @@ fn test_lexer_for_basic() -> ParseResult<()> { Ok(()) } +#[test] +fn test_lexer_for_advanced() -> ParseResult<()> { + let mut lexer = Lexer::new(Input::File(FILE2.into())); + let newline = "\n"; + let /*mut*/ token_array = vec![ + (Newline, newline), + (Newline, newline), + (Newline, newline), + (Newline, newline), + (Symbol, "f"), + (Symbol, "x"), + (Equal, "="), + (NatLit, "1"), + (Plus, "+"), + (Symbol, "x"), + (Plus, "+"), + (NatLit, "2"), + (Newline, newline), + (Symbol, "f"), + (Symbol, "x"), + (Comma, ","), + (Symbol, "y"), + (Equal, "="), + (Newline, newline), + (Indent, " "), + (NatLit, "1"), + (Plus, "+"), + (Symbol, "x"), + (Plus, "+"), + (Symbol, "y"), + (Newline, newline), + (Dedent, ""), + (Symbol, "f"), + (Symbol, "x"), + (Comma, ","), + (Symbol, "y"), + (Comma, ","), + (Symbol, "z"), + (Equal, "="), + (Newline, newline), + (Indent, " "), + (NatLit, "1"), + (Plus, "+"), + (Symbol, "x"), + (Plus, "+"), + (Symbol, "y"), + (Plus, "+"), + (Symbol, "z"), + (Newline, newline), + (Dedent, ""), + (Symbol, "assert"), + (NatLit, "4"), + (DblEq, "=="), + (Symbol, "f"), + (NatLit, "1"), + (Newline, newline), + (Symbol, "assert"), + (NatLit, "4"), + (DblEq, "=="), + (Symbol, "f"), + (NatLit, "1"), + (Comma, ","), + (NatLit, "1"), + (Newline, newline), + (Symbol, "assert"), + (NatLit, "3"), + (DblEq, "=="), + (Symbol, "f"), + (NatLit, "1"), + (Comma, ","), + (NatLit, "1"), + (Comma, ","), + (NatLit, "1"), + (Newline, newline), + (Newline, newline), + (Newline, newline), + (Symbol, "fib"), + (NatLit, "0"), + (Equal, "="), + (NatLit, "0"), + (Newline, newline), + (Symbol, "fib"), + (NatLit, "1"), + (Equal, "="), + (NatLit, "1"), + (Newline, newline), + (Symbol, "fib"), + (LParen, "("), + (Symbol, "n"), + (Colon, ":"), + (Symbol, "Nat"), + (RParen, ")"), + (FuncArrow, "->"), + (Symbol, "Nat"), + (Equal, "="), + (Symbol, "fib"), + (LParen, "("), + (Symbol, "n"), + (Minus, "-"), + (NatLit, "1"), + (RParen, ")"), + (Plus, "+"), + (Symbol, "fib"), + (LParen, "("), + (Symbol, "n"), + (Minus, "-"), + (NatLit, "2"), + (RParen, ")"), + (Newline, newline), + (Newline, newline), + (Newline, newline), + (Symbol, "t"), + (Equal, "="), + (Symbol, "if"), + (BoolLit, "True"), + (Colon, ":"), + (Newline, newline), + (Indent, " "), + (Symbol, "then"), + (Colon, ":"), + (NatLit, "1"), + (Newline, newline), + (Symbol, "else"), + (Colon, ":"), + (NatLit, "2"), + (Newline, newline), + (Dedent, ""), + (Symbol, "assert"), + (Symbol, "t"), + (DblEq, "=="), + (NatLit, "1"), + (Newline, newline), + (Newline, newline), + (Newline, newline), + (Symbol, "math"), + (Equal, "="), + (Symbol, "import"), + (StrLit, "\"math\""), + (Newline, newline), + (Newline, newline), + (LBrace, "{"), + (Symbol, "pi"), + (RBrace, "}"), + (Equal, "="), + (Symbol, "import"), + (StrLit, "\"math\""), + (Newline, newline), + (EOF, ""), ]; let mut tok: Token; - for i in token_array.into_iter() { + for (id, i) in token_array.into_iter().enumerate() { + print!("{id:>03}: "); tok = lexer.next().unwrap().unwrap(); assert_eq!(tok, Token::from_str(i.0, i.1)); println!("{tok}"); From 58c51f620c9ec2910b9b0c9221061ec457baf82f Mon Sep 17 00:00:00 2001 From: GreasySlug <9619abgoni@gmail.com> Date: Tue, 13 Sep 2022 17:24:37 
+0900 Subject: [PATCH 03/11] Test: update Mod value --- compiler/erg_parser/tests/test.rs | 123 +++++++++++++++--------------- 1 file changed, 62 insertions(+), 61 deletions(-) diff --git a/compiler/erg_parser/tests/test.rs b/compiler/erg_parser/tests/test.rs index 6309abd7..25987d97 100644 --- a/compiler/erg_parser/tests/test.rs +++ b/compiler/erg_parser/tests/test.rs @@ -10,6 +10,7 @@ use erg_parser::token::*; use TokenKind::*; const FILE1: &str = "tests/test1_basic_syntax.er"; +const FILE2: &str = "tests/test2_advanced_syntax.er"; #[test] fn test_lexer_for_basic() -> ParseResult<()> { @@ -19,10 +20,10 @@ fn test_lexer_for_basic() -> ParseResult<()> { (Newline, newline), (Newline, newline), (Newline, newline), - (Symbol, "_a"), - (Equal, "="), + (Symbol, "_a"), + (Equal, "="), (NatLit, "1_234"), - (Plus, "+"), + (Plus, "+"), (RatioLit, "1113."), (Star, "*"), (RatioLit, "3_000.2e-4"), @@ -30,16 +31,16 @@ fn test_lexer_for_basic() -> ParseResult<()> { (NatLit, "0003"), (Star, "*"), (RatioLit, ".4"), - (Newline, newline), - (Symbol, "a"), - (Comma, ","), - (UBar, "_"), - (Comma, ","), + (Newline, newline), + (Symbol, "a"), + (Comma, ","), + (UBar, "_"), + (Comma, ","), (Spread, "..."), // EllipsisLit - (Symbol, "b"), - (Equal, "="), - (Symbol, "five_elem_tuple"), - (Newline, newline), + (Symbol, "b"), + (Equal, "="), + (Symbol, "five_elem_tuple"), + (Newline, newline), (Symbol, "f"), (Symbol, "x"), (Comma, ","), @@ -52,65 +53,65 @@ fn test_lexer_for_basic() -> ParseResult<()> { (Symbol, "y"), (Newline, newline), (Dedent, ""), - (Symbol, "if!"), + (Symbol, "if!"), (BoolLit, "True"), - (Comma, ","), - (Symbol, "do!"), - (Newline, newline), - (Indent, " "), - (Symbol, "print!"), + (Comma, ","), + (Symbol, "do!"), + (Newline, newline), + (Indent, " "), + (Symbol, "print!"), (StrLit, "\"\\\\hello, world\\\"\""), - (Newline, newline), + (Newline, newline), (NatLit, "10"), - (Dot, "."), - (Symbol, "times!"), - (Symbol, "do!"), - (Newline, newline), - (Indent, " "), - (Symbol, "if!"), + (Dot, "."), + (Symbol, "times!"), + (Symbol, "do!"), + (Newline, newline), + (Indent, " "), + (Symbol, "if!"), (Symbol, "x"), (Dot, "."), (Symbol, "y"), (Dot, "."), (Symbol, "z"), - (Comma, ","), - (Symbol, "do!"), - (Newline, newline), - (Indent, " "), - (Symbol, "print!"), + (Comma, ","), + (Symbol, "do!"), + (Newline, newline), + (Indent, " "), + (Symbol, "print!"), (StrLit, "\"\""), - (Newline, newline), - (Dedent, ""), - (Newline, newline), - (Newline, newline), - (Symbol, "Hello"), - (Equal, "="), - (Symbol, "S2c"), + (Newline, newline), + (Dedent, ""), + (Newline, newline), + (Newline, newline), + (Symbol, "Hello"), + (Equal, "="), + (Symbol, "S2c"), (StrLit, "\"hello\""), - (Newline, newline), - (Dedent, ""), - (Dedent, ""), - (Symbol, "aあ아"), - (Equal, "="), - (Newline, newline), - (Indent, " "), - (Newline, newline), + (Newline, newline), + (Dedent, ""), + (Dedent, ""), + (Symbol, "aあ아"), + (Equal, "="), + (Newline, newline), + (Indent, " "), + (Newline, newline), (StrLit, "\"aaa\""), - (Newline, newline), - (Dedent, ""), - (Symbol, "x"), - (Semi, ";"), - (Symbol, "x"), - (Semi, ";"), - (Semi, ";"), - (Symbol, "x"), - (Semi, ";"), - (Newline, newline), + (Newline, newline), + (Dedent, ""), + (Symbol, "x"), + (Semi, ";"), + (Symbol, "x"), + (Semi, ";"), + (Semi, ";"), + (Symbol, "x"), + (Semi, ";"), + (Newline, newline), (NatLit, "10"), - (Closed, ".."), - (Symbol, "twelve"), - (Semi, ";"), - (Newline, newline), + (Closed, ".."), + (Symbol, "twelve"), + (Semi, ";"), + (Newline, newline), (EOF, ""), ]; let 
mut tok: Token; @@ -271,7 +272,7 @@ fn test_lexer_for_advanced() -> ParseResult<()> { (StrLit, "\"math\""), (Newline, newline), (EOF, ""), - ]; + ]; let mut tok: Token; for (id, i) in token_array.into_iter().enumerate() { @@ -285,7 +286,7 @@ #[test] fn tesop_te_prec() { - assert_eq!(Mod.precedence(), Some(160)); + assert_eq!(Mod.precedence(), Some(170)); assert_eq!(LParen.precedence(), Some(0)); assert_eq!(Illegal.precedence(), None); } From 14ad70ed3010228d1d8b3df9f8aa13ddf7aba4fd Mon Sep 17 00:00:00 2001 From: GreasySlug <9619abgoni@gmail.com> Date: Wed, 14 Sep 2022 10:58:00 +0900 Subject: [PATCH 04/11] Clean: file name was ambiguous so made it explicit --- compiler/erg_parser/tests/{test.rs => tokenize_test.rs} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename compiler/erg_parser/tests/{test.rs => tokenize_test.rs} (100%) diff --git a/compiler/erg_parser/tests/test.rs b/compiler/erg_parser/tests/tokenize_test.rs similarity index 100% rename from compiler/erg_parser/tests/test.rs rename to compiler/erg_parser/tests/tokenize_test.rs From d3fef97272d8e14cc1e8a75de82d45dfbd867264 Mon Sep 17 00:00:00 2001 From: GreasySlug <9619abgoni@gmail.com> Date: Wed, 14 Sep 2022 10:58:18 +0900 Subject: [PATCH 05/11] Test: add literal test --- .../erg_parser/tests/test3_literal_syntax.er | 27 +++++++ compiler/erg_parser/tests/tokenize_test.rs | 80 +++++++++++++++++++ 2 files changed, 107 insertions(+) create mode 100644 compiler/erg_parser/tests/test3_literal_syntax.er diff --git a/compiler/erg_parser/tests/test3_literal_syntax.er b/compiler/erg_parser/tests/test3_literal_syntax.er new file mode 100644 index 00000000..96d268cd --- /dev/null +++ b/compiler/erg_parser/tests/test3_literal_syntax.er @@ -0,0 +1,27 @@ +## Basic Literals ## +# Nat Literal +0, 1, 2, 100_000 + +# Int Literal: Positive numbers cannot be determined during tokenization +-1, -10, -100_000 + +# Ratio Literal +0.00, -0.0, .1, 400. + +# Str Literal +"", "a", "こんにちは", "\" \\ " + +# Boolean Literal +True, False + +# None Literal +None + +# Ellipsis Literal +...
+ +# Infinite Literal +Inf, -Inf + +# Exponential Literal +# 1e-34, 0.4e-10, 2.455+e5 diff --git a/compiler/erg_parser/tests/tokenize_test.rs b/compiler/erg_parser/tests/tokenize_test.rs index 25987d97..f8a91f74 100644 --- a/compiler/erg_parser/tests/tokenize_test.rs +++ b/compiler/erg_parser/tests/tokenize_test.rs @@ -11,6 +11,7 @@ use TokenKind::*; const FILE1: &str = "tests/test1_basic_syntax.er"; const FILE2: &str = "tests/test2_advanced_syntax.er"; +const FILE3: &str = "tests/test3_literal_syntax.er"; #[test] fn test_lexer_for_basic() -> ParseResult<()> { @@ -284,6 +285,85 @@ fn test_lexer_for_advanced() -> ParseResult<()> { Ok(()) } +#[test] +fn test_lexer_for_literals() -> ParseResult<()> { + let mut lexer = Lexer::new(Input::File(FILE3.into())); + let newline = "\n"; + let /*mut*/ token_array = vec![ + (Newline, newline), + (Newline, newline), + (NatLit, "0"), + (Comma, ","), + (NatLit, "1"), + (Comma, ","), + (NatLit, "2"), + (Comma, ","), + (NatLit, "100_000"), + (Newline, newline), + (Newline, newline), + (Newline, newline), + (IntLit, "-1"), + (Comma, ","), + (IntLit, "-10"), + (Comma, ","), + (IntLit, "-100_000"), + (Newline, newline), + (Newline, newline), + (Newline, newline), + (RatioLit, "0.00"), + (Comma, ","), + (RatioLit, "-0.0"), + (Comma, ","), + (RatioLit, ".1"), + (Comma, ","), + (RatioLit, "400."), + (Newline, newline), + (Newline, newline), + (Newline, newline), + (StrLit, "\"\""), + (Comma, ","), + (StrLit, "\"a\""), + (Comma, ","), + (StrLit, "\"こんにちは\""), + (Comma, ","), + (StrLit, "\"\\\" \\\\ \""), + (Newline, newline), + (Newline, newline), + (Newline, newline), + (BoolLit, "True"), + (Comma, ","), + (BoolLit, "False"), + (Newline, newline), + (Newline, newline), + (Newline, newline), + (NoneLit, "None"), + (Newline, newline), + (Newline, newline), + (Newline, newline), + (EllipsisLit, "..."), + (Newline, newline), + (Newline, newline), + (Newline, newline), + (InfLit, "Inf"), + (Comma, ","), + (PreMinus, "-"), + (InfLit, "Inf"), + (Newline, newline), + (Newline, newline), + (Newline, newline), + // (EOF, ""), + ]; + + let mut tok: Token; + for (id, i) in token_array.into_iter().enumerate() { + print!("{id:>03}: "); + tok = lexer.next().unwrap().unwrap(); + assert_eq!(tok, Token::from_str(i.0, i.1)); + println!("{tok}"); + } + Ok(()) +} + #[test] fn tesop_te_prec() { assert_eq!(Mod.precedence(), Some(170)); From 4304ae132527ba095fa97542bb75ee1f7d28c582 Mon Sep 17 00:00:00 2001 From: GreasySlug <9619abgoni@gmail.com> Date: Thu, 15 Sep 2022 08:27:18 +0900 Subject: [PATCH 06/11] Test: add not implemented test --- compiler/erg_parser/tests/test3_literal_syntax.er | 3 +++ compiler/erg_parser/tests/tokenize_test.rs | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/compiler/erg_parser/tests/test3_literal_syntax.er b/compiler/erg_parser/tests/test3_literal_syntax.er index 96d268cd..2bbae620 100644 --- a/compiler/erg_parser/tests/test3_literal_syntax.er +++ b/compiler/erg_parser/tests/test3_literal_syntax.er @@ -23,5 +23,8 @@ None # Infinite Literal Inf, -Inf +# Not Implemented Literal +NotImplemented + # Exponential Literal # 1e-34, 0.4e-10, 2.455+e5 diff --git a/compiler/erg_parser/tests/tokenize_test.rs b/compiler/erg_parser/tests/tokenize_test.rs index f8a91f74..425801ff 100644 --- a/compiler/erg_parser/tests/tokenize_test.rs +++ b/compiler/erg_parser/tests/tokenize_test.rs @@ -351,6 +351,10 @@ fn test_lexer_for_literals() -> ParseResult<()> { (Newline, newline), (Newline, newline), (Newline, newline), + (NoImplLit, "NotImplemented"), + (Newline, newline), +
(Newline, newline), + (Newline, newline), // (EOF, ""), ]; From 61105b9435d019e39f7d35f8cbda57d393454961 Mon Sep 17 00:00:00 2001 From: Shunsuke Shibayama Date: Thu, 15 Sep 2022 15:08:24 +0900 Subject: [PATCH 07/11] Update simple_if.er --- compiler/erg_parser/tests/simple_if.er | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/compiler/erg_parser/tests/simple_if.er b/compiler/erg_parser/tests/simple_if.er index 3a4c28c9..007bffce 100644 --- a/compiler/erg_parser/tests/simple_if.er +++ b/compiler/erg_parser/tests/simple_if.er @@ -17,4 +17,12 @@ print! if False: print!(if(True, () -> "a", () -> "b")) -# TODO: do/do! +if! True: + do! print! "hello" + do! print! "world" + +if! False: + do!: + print! "a" + do!: + print! "b" From c84418107f2bfda0fd37c2853cff171fdd3bf100 Mon Sep 17 00:00:00 2001 From: Shunsuke Shibayama Date: Thu, 15 Sep 2022 15:09:12 +0900 Subject: [PATCH 08/11] Update test1_basic_syntax.er --- compiler/erg_parser/tests/test1_basic_syntax.er | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/erg_parser/tests/test1_basic_syntax.er b/compiler/erg_parser/tests/test1_basic_syntax.er index a5032eb9..ffd77c3f 100644 --- a/compiler/erg_parser/tests/test1_basic_syntax.er +++ b/compiler/erg_parser/tests/test1_basic_syntax.er @@ -5,10 +5,10 @@ _a = 1_234 + 1113.* 3_000.2e-4 ** 0003 * .4 a, _, ...b = five_elem_tuple f x, y = x + y -if! True, do! +if! True, do!: print! "\\hello, world\"" - 10.times! do! - if! x.y.z, do! + 10.times! do!: + if! x.y.z, do!: print! "" # illegal indent # do_nothing! From 90149c0c9c0537386107d346bd3f880ca3432274 Mon Sep 17 00:00:00 2001 From: Shunsuke Shibayama Date: Thu, 15 Sep 2022 15:20:50 +0900 Subject: [PATCH 09/11] Update `parse_test.rs` --- compiler/erg_parser/tests/parse_test.rs | 31 +++++++++++++++++++------ 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/compiler/erg_parser/tests/parse_test.rs b/compiler/erg_parser/tests/parse_test.rs index 04aba987..c22ebc47 100644 --- a/compiler/erg_parser/tests/parse_test.rs +++ b/compiler/erg_parser/tests/parse_test.rs @@ -8,37 +8,37 @@ use erg_parser::ParserRunner; #[test] fn parse_dependent() -> Result<(), ParserRunnerErrors> { - parse_test_from_code("tests/dependent.er") + expect_success("tests/dependent.er") } #[test] fn parse_fib() -> Result<(), ParserRunnerErrors> { - parse_test_from_code("tests/fib.er") + expect_success("tests/fib.er") } #[test] fn parse_hello_world() -> Result<(), ParserRunnerErrors> { - parse_test_from_code("tests/hello_world.er") + expect_success("tests/hello_world.er") } #[test] fn parse_simple_if() -> Result<(), ParserRunnerErrors> { - parse_test_from_code("tests/simple_if.er") + expect_success("tests/simple_if.er") } #[test] fn parse_stack() -> Result<(), ParserRunnerErrors> { - parse_test_from_code("tests/stack.er") + expect_failure("tests/stack.er") } #[test] fn parse_test1_basic_syntax() -> Result<(), ParserRunnerErrors> { - parse_test_from_code("tests/test1_basic_syntax.er") + expect_success("tests/test1_basic_syntax.er") } #[test] fn parse_test2_advanced_syntax() -> Result<(), ParserRunnerErrors> { - parse_test_from_code("tests/test2_advanced_syntax.er") + expect_success("tests/test2_advanced_syntax.er") } fn parse_test_from_code(file_path: &'static str) -> Result<(), ParserRunnerErrors> { @@ -71,3 +71,20 @@ fn parse_test_from_code(file_path: &'static str) -> Result<(), ParserRunnerError } } } + +fn expect_success(file_path: &'static str) -> Result<(), ParserRunnerErrors> { + match parse_test_from_code(file_path) 
{ + Ok(_) => Ok(()), + Err(e) => { + e.fmt_all_stderr(); + Err(e) + } + } +} + +fn expect_failure(file_path: &'static str) -> Result<(), ParserRunnerErrors> { + match parse_test_from_code(file_path) { + Ok(_) => Err(ParserRunnerErrors::empty()), + Err(_) => Ok(()), + } +} From 1b379767014c00beb089dca4cb27dfcf323b13b2 Mon Sep 17 00:00:00 2001 From: Shunsuke Shibayama Date: Thu, 15 Sep 2022 15:21:01 +0900 Subject: [PATCH 10/11] Update test2_advanced_syntax.er --- .../erg_parser/tests/test2_advanced_syntax.er | 20 +++++-------------- 1 file changed, 5 insertions(+), 15 deletions(-) diff --git a/compiler/erg_parser/tests/test2_advanced_syntax.er b/compiler/erg_parser/tests/test2_advanced_syntax.er index 271bd621..d434abda 100644 --- a/compiler/erg_parser/tests/test2_advanced_syntax.er +++ b/compiler/erg_parser/tests/test2_advanced_syntax.er @@ -1,28 +1,18 @@ # Check that a parser can pass the advanced syntax # 高度な文法をチェックする -# overloading (多重定義) -f x = 1 + x + 2 -f x, y = - 1 + x + y -f x, y, z = - 1 + x + y + z -assert 4 == f 1 -assert 4 == f 1, 1 -assert 3 == f 1, 1, 1 - -# pattern overloading +# multiple patterns definition fib 0 = 0 fib 1 = 1 fib(n: Nat) -> Nat = fib(n-1) + fib(n-2) -# keyword arguments (キーワード引数) +# keyword arguments t = if True: - then: 1 - else: 2 + then := 1 + else := 2 assert t == 1 # import math = import "math" # {*} = "math" # use all -{pi} = import "math" +{pi, ...} = import "math" From 042ea70ec6fa6b333e36bfb04ee4fdb3507bae98 Mon Sep 17 00:00:00 2001 From: Shunsuke Shibayama Date: Thu, 15 Sep 2022 15:26:39 +0900 Subject: [PATCH 11/11] Update tokenize_test.rs --- compiler/erg_parser/tests/tokenize_test.rs | 76 ++-------------------- 1 file changed, 7 insertions(+), 69 deletions(-) diff --git a/compiler/erg_parser/tests/tokenize_test.rs b/compiler/erg_parser/tests/tokenize_test.rs index 425801ff..47744926 100644 --- a/compiler/erg_parser/tests/tokenize_test.rs +++ b/compiler/erg_parser/tests/tokenize_test.rs @@ -58,6 +58,7 @@ fn test_lexer_for_basic() -> ParseResult<()> { (BoolLit, "True"), (Comma, ","), (Symbol, "do!"), + (Colon, ":"), (Newline, newline), (Indent, " "), (Symbol, "print!"), @@ -67,6 +68,7 @@ fn test_lexer_for_basic() -> ParseResult<()> { (Dot, "."), (Symbol, "times!"), (Symbol, "do!"), + (Colon, ":"), (Newline, newline), (Indent, " "), (Symbol, "if!"), @@ -77,6 +79,7 @@ fn test_lexer_for_basic() -> ParseResult<()> { (Symbol, "z"), (Comma, ","), (Symbol, "do!"), + (Colon, ":"), (Newline, newline), (Indent, " "), (Symbol, "print!"), @@ -134,73 +137,6 @@ fn test_lexer_for_advanced() -> ParseResult<()> { (Newline, newline), (Newline, newline), (Newline, newline), - (Symbol, "f"), - (Symbol, "x"), - (Equal, "="), - (NatLit, "1"), - (Plus, "+"), - (Symbol, "x"), - (Plus, "+"), - (NatLit, "2"), - (Newline, newline), - (Symbol, "f"), - (Symbol, "x"), - (Comma, ","), - (Symbol, "y"), - (Equal, "="), - (Newline, newline), - (Indent, " "), - (NatLit, "1"), - (Plus, "+"), - (Symbol, "x"), - (Plus, "+"), - (Symbol, "y"), - (Newline, newline), - (Dedent, ""), - (Symbol, "f"), - (Symbol, "x"), - (Comma, ","), - (Symbol, "y"), - (Comma, ","), - (Symbol, "z"), - (Equal, "="), - (Newline, newline), - (Indent, " "), - (NatLit, "1"), - (Plus, "+"), - (Symbol, "x"), - (Plus, "+"), - (Symbol, "y"), - (Plus, "+"), - (Symbol, "z"), - (Newline, newline), - (Dedent, ""), - (Symbol, "assert"), - (NatLit, "4"), - (DblEq, "=="), - (Symbol, "f"), - (NatLit, "1"), - (Newline, newline), - (Symbol, "assert"), - (NatLit, "4"), - (DblEq, "=="), - (Symbol, "f"), - (NatLit, "1"), -
(Comma, ","), - (NatLit, "1"), - (Newline, newline), - (Symbol, "assert"), - (NatLit, "3"), - (DblEq, "=="), - (Symbol, "f"), - (NatLit, "1"), - (Comma, ","), - (NatLit, "1"), - (Comma, ","), - (NatLit, "1"), - (Newline, newline), - (Newline, newline), - (Newline, newline), (Symbol, "fib"), (NatLit, "0"), (Equal, "="), @@ -244,11 +180,11 @@ fn test_lexer_for_advanced() -> ParseResult<()> { (Newline, newline), (Indent, " "), (Symbol, "then"), - (Colon, ":"), + (Walrus, ":="), (NatLit, "1"), (Newline, newline), (Symbol, "else"), - (Colon, ":"), + (Walrus, ":="), (NatLit, "2"), (Newline, newline), (Dedent, ""), @@ -267,6 +203,8 @@ fn test_lexer_for_advanced() -> ParseResult<()> { (Newline, newline), (LBrace, "{"), (Symbol, "pi"), + (Comma, ","), + (EllipsisLit, "..."), (RBrace, "}"), (Equal, "="), (Symbol, "import"),