Merge pull request #156 from GreasySlug/fix/test

Updated and added tokenizer tests that read their input from files
commit 0f15ba4e77
Shunsuke Shibayama, 2022-09-15 15:27:30 +09:00, committed by GitHub
7 changed files with 385 additions and 144 deletions
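
Both the parser tests and the lexer tests below are ordinary Cargo test targets. For reference, a minimal way to run them locally; the package name erg_parser is an assumption based on the imports shown in the diffs:

    cargo test -p erg_parser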

File 1 of 7: parser tests (Rust)

@@ -8,37 +8,37 @@ use erg_parser::ParserRunner;
 #[test]
 fn parse_dependent() -> Result<(), ParserRunnerErrors> {
-    parse_test_from_code("tests/dependent.er")
+    expect_success("tests/dependent.er")
 }

 #[test]
 fn parse_fib() -> Result<(), ParserRunnerErrors> {
-    parse_test_from_code("tests/fib.er")
+    expect_success("tests/fib.er")
 }

 #[test]
 fn parse_hello_world() -> Result<(), ParserRunnerErrors> {
-    parse_test_from_code("tests/hello_world.er")
+    expect_success("tests/hello_world.er")
 }

 #[test]
 fn parse_simple_if() -> Result<(), ParserRunnerErrors> {
-    parse_test_from_code("tests/simple_if.er")
+    expect_success("tests/simple_if.er")
 }

 #[test]
 fn parse_stack() -> Result<(), ParserRunnerErrors> {
-    parse_test_from_code("tests/stack.er")
+    expect_failure("tests/stack.er")
 }

 #[test]
 fn parse_test1_basic_syntax() -> Result<(), ParserRunnerErrors> {
-    parse_test_from_code("tests/test1_basic_syntax.er")
+    expect_success("tests/test1_basic_syntax.er")
 }

 #[test]
 fn parse_test2_advanced_syntax() -> Result<(), ParserRunnerErrors> {
-    parse_test_from_code("tests/test2_advanced_syntax.er")
+    expect_success("tests/test2_advanced_syntax.er")
 }

 fn parse_test_from_code(file_path: &'static str) -> Result<(), ParserRunnerErrors> {
@@ -71,3 +71,20 @@ fn parse_test_from_code(file_path: &'static str) -> Result<(), ParserRunnerErrors> {
         }
     }
 }
+
+fn expect_success(file_path: &'static str) -> Result<(), ParserRunnerErrors> {
+    match parse_test_from_code(file_path) {
+        Ok(_) => Ok(()),
+        Err(e) => {
+            e.fmt_all_stderr();
+            Err(e)
+        }
+    }
+}
+
+fn expect_failure(file_path: &'static str) -> Result<(), ParserRunnerErrors> {
+    match parse_test_from_code(file_path) {
+        Ok(_) => Err(ParserRunnerErrors::empty()),
+        Err(_) => Ok(()),
+    }
+}
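
With these helpers, an expected-to-parse or expected-to-fail fixture becomes a one-line test. A minimal sketch of how a future test would use them; the fixture name here is hypothetical:

    #[test]
    fn parse_new_fixture() -> Result<(), ParserRunnerErrors> {
        // hypothetical fixture that should parse without errors
        expect_success("tests/new_fixture.er")
    }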

File 2 of 7: .er parser test fixture (if!/do! blocks)

@@ -17,4 +17,12 @@ print! if False:
 print!(if(True, () -> "a", () -> "b"))
-# TODO: do/do!
+if! True:
+    do! print! "hello"
+    do! print! "world"
+if! False:
+    do!:
+        print! "a"
+    do!:
+        print! "b"

File 3 of 7: old lexer test suite (Rust, deleted)

@@ -1,118 +0,0 @@
use std::iter::Iterator;
use erg_common::config::Input;
// use erg_compiler::parser;
use erg_parser::error::ParseResult;
use erg_parser::lex::Lexer;
use erg_parser::token::*;
use TokenKind::*;
const FILE1: &str = "tests/test1_basic_syntax.er";
#[test]
fn test_lexer() -> ParseResult<()> {
    let mut lexer = Lexer::new(Input::File(FILE1.into()));
    let newline = "\n";
    let /*mut*/ token_array = vec![
        (Symbol, "_a"),
        (Equal, "="),
        (IntLit, "1234"),
        (Plus, "+"),
        (RatioLit, "1113.0"),
        (Plus, "+"),
        (RatioLit, "0.30102"),
        // (Symbol, "a"),
        (Newline, newline),
        (Symbol, "a"),
        (Comma, ","),
        (UBar, "_"),
        (Comma, ","),
        (Spread, "..."),
        (Symbol, "b"),
        (Equal, "="),
        (Symbol, "five_elem_tuple"),
        (Newline, newline),
        (Symbol, "if!"),
        (Symbol, "True"),
        (Comma, ","),
        (Symbol, "do!"),
        (Newline, newline),
        (Indent, " "),
        (Symbol, "print!"),
        // (LParen, "("),
        (StrLit, "\\\\hello, world\\\""),
        // (RParen, ")"),
        (Newline, newline),
        (IntLit, "10"),
        (Dot, "."),
        (Symbol, "times!"),
        // (LParen, "("),
        // (RParen, ")"),
        (Symbol, "do!"),
        (Newline, newline),
        (Indent, " "),
        (Symbol, "if!"),
        (Symbol, "True"),
        (Comma, ","),
        (Symbol, "do!"),
        (Newline, newline),
        (Indent, " "),
        (Symbol, "print!"),
        (StrLit, ""),
        (Newline, newline),
        // (Comment, " illegal indent"),
        // (Illegal, "DEDENT"),
        // (Symbol, "do_nothing"),
        (Dedent, ""),
        (Newline, newline),
        (Newline, newline),
        (Symbol, "Hello"),
        (Equal, "="),
        (Symbol, "S2c"),
        // (LParen, "("),
        (StrLit, "hello"),
        // (RParen, ")"),
        (Newline, newline),
        (Dedent, ""),
        (Dedent, ""),
        (Symbol, "aあ아"),
        (Equal, "="),
        (Newline, newline),
        (Indent, " "),
        (Newline, newline),
        (StrLit, "aaa"),
        (Newline, newline),
        (Dedent, ""),
        (Symbol, "x"),
        (Semi, ";"),
        (Symbol, "x"),
        (Semi, ";"),
        (Semi, ";"),
        (Symbol, "x"),
        (Semi, ";"),
        (Newline, newline),
        (IntLit, "10"),
        (Closed, ".."),
        (Symbol, "twelve"),
        (Semi, ";"),
        (Newline, newline),
        (EOF, "EOF"),
    ];
    let mut tok: Token;
    for i in token_array.into_iter() {
        tok = lexer.next().unwrap().unwrap();
        assert_eq!(tok, Token::from_str(i.0, i.1));
        println!("{tok}");
    }
    Ok(())
}

#[test]
fn tesop_te_prec() {
    assert_eq!(Mod.precedence(), Some(160));
    assert_eq!(LParen.precedence(), Some(0));
    assert_eq!(Illegal.precedence(), None);
}

File 4 of 7: tests/test1_basic_syntax.er

@@ -5,10 +5,10 @@ _a = 1_234 + 1113.* 3_000.2e-4 ** 0003 * .4
 a, _, ...b = five_elem_tuple
 f x, y =
     x + y
-if! True, do!
+if! True, do!:
     print! "\\hello, world\""
-    10.times! do!
+    10.times! do!:
-        if! x.y.z, do!
+        if! x.y.z, do!:
             print! ""
 # illegal indent
 # do_nothing!

File 5 of 7: tests/test2_advanced_syntax.er

@@ -1,28 +1,18 @@
 # Check that a parser can pass the advanced syntax
 # 高度な文法をチェックする
-# overloading (多重定義)
-f x = 1 + x + 2
-f x, y =
-    1 + x + y
-f x, y, z =
-    1 + x + y + z
-assert 4 == f 1
-assert 4 == f 1, 1
-assert 3 == f 1, 1, 1
-# pattern overloading
+# multiple patterns definition
 fib 0 = 0
 fib 1 = 1
 fib(n: Nat) -> Nat = fib(n-1) + fib(n-2)
-# keyword arguments (キーワード引数)
+# keyword arguments
 t = if True:
-    then: 1
+    then := 1
-    else: 2
+    else := 2
 assert t == 1
 # import
 math = import "math"
 # {*} = "math" # use all
-{pi} = import "math"
+{pi, ...} = import "math"

File 6 of 7: tests/test3_literal_syntax.er (new)

@@ -0,0 +1,30 @@
## Basic Literals ##
# Nat Literal
0, 1, 2, 100_000
# Int Literal: positive numbers cannot be distinguished from Nat during tokenization
-1, -10, -100_000
# Ratio Literal
0.00, -0.0, .1, 400.
# Str Literal
"", "a", "こんにちは", "\" \\ "
# Boolean Literal
True, False
# None Literal
None
# Ellipsis Literal
...
# Infinite Literal
Inf, -Inf
# Not Implemented Literal
NotImplemented
# Exponential Literal
# 1e-34, 0.4e-10, 2.455+e5

File 7 of 7: new lexer test suite (Rust)

@@ -0,0 +1,314 @@
use std::iter::Iterator;
use erg_common::config::Input;
// use erg_compiler::parser;
use erg_parser::error::ParseResult;
use erg_parser::lex::Lexer;
use erg_parser::token::*;
use TokenKind::*;
const FILE1: &str = "tests/test1_basic_syntax.er";
const FILE2: &str = "tests/test2_advanced_syntax.er";
const FILE3: &str = "tests/test3_literal_syntax.er";
#[test]
fn test_lexer_for_basic() -> ParseResult<()> {
    let mut lexer = Lexer::new(Input::File(FILE1.into()));
    let newline = "\n";
    let /*mut*/ token_array = vec![
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        (Symbol, "_a"),
        (Equal, "="),
        (NatLit, "1_234"),
        (Plus, "+"),
        (RatioLit, "1113."),
        (Star, "*"),
        (RatioLit, "3_000.2e-4"),
        (Pow, "**"),
        (NatLit, "0003"),
        (Star, "*"),
        (RatioLit, ".4"),
        (Newline, newline),
        (Symbol, "a"),
        (Comma, ","),
        (UBar, "_"),
        (Comma, ","),
        (Spread, "..."), // EllipsisLit
        (Symbol, "b"),
        (Equal, "="),
        (Symbol, "five_elem_tuple"),
        (Newline, newline),
        (Symbol, "f"),
        (Symbol, "x"),
        (Comma, ","),
        (Symbol, "y"),
        (Equal, "="),
        (Newline, newline),
        (Indent, " "),
        (Symbol, "x"),
        (Plus, "+"),
        (Symbol, "y"),
        (Newline, newline),
        (Dedent, ""),
        (Symbol, "if!"),
        (BoolLit, "True"),
        (Comma, ","),
        (Symbol, "do!"),
        (Colon, ":"),
        (Newline, newline),
        (Indent, " "),
        (Symbol, "print!"),
        (StrLit, "\"\\\\hello, world\\\"\""),
        (Newline, newline),
        (NatLit, "10"),
        (Dot, "."),
        (Symbol, "times!"),
        (Symbol, "do!"),
        (Colon, ":"),
        (Newline, newline),
        (Indent, " "),
        (Symbol, "if!"),
        (Symbol, "x"),
        (Dot, "."),
        (Symbol, "y"),
        (Dot, "."),
        (Symbol, "z"),
        (Comma, ","),
        (Symbol, "do!"),
        (Colon, ":"),
        (Newline, newline),
        (Indent, " "),
        (Symbol, "print!"),
        (StrLit, "\"\""),
        (Newline, newline),
        (Dedent, ""),
        (Newline, newline),
        (Newline, newline),
        (Symbol, "Hello"),
        (Equal, "="),
        (Symbol, "S2c"),
        (StrLit, "\"hello\""),
        (Newline, newline),
        (Dedent, ""),
        (Dedent, ""),
        (Symbol, "aあ아"),
        (Equal, "="),
        (Newline, newline),
        (Indent, " "),
        (Newline, newline),
        (StrLit, "\"aaa\""),
        (Newline, newline),
        (Dedent, ""),
        (Symbol, "x"),
        (Semi, ";"),
        (Symbol, "x"),
        (Semi, ";"),
        (Semi, ";"),
        (Symbol, "x"),
        (Semi, ";"),
        (Newline, newline),
        (NatLit, "10"),
        (Closed, ".."),
        (Symbol, "twelve"),
        (Semi, ";"),
        (Newline, newline),
        (EOF, ""),
    ];
    let mut tok: Token;
    for (id, i) in token_array.into_iter().enumerate() {
        tok = lexer.next().unwrap().unwrap();
        assert_eq!(tok, Token::from_str(i.0, i.1));
        print!("{id:>03}: ");
        println!("{tok}");
    }
    Ok(())
}

#[test]
fn test_lexer_for_advanced() -> ParseResult<()> {
    let mut lexer = Lexer::new(Input::File(FILE2.into()));
    let newline = "\n";
    let /*mut*/ token_array = vec![
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        (Symbol, "fib"),
        (NatLit, "0"),
        (Equal, "="),
        (NatLit, "0"),
        (Newline, newline),
        (Symbol, "fib"),
        (NatLit, "1"),
        (Equal, "="),
        (NatLit, "1"),
        (Newline, newline),
        (Symbol, "fib"),
        (LParen, "("),
        (Symbol, "n"),
        (Colon, ":"),
        (Symbol, "Nat"),
        (RParen, ")"),
        (FuncArrow, "->"),
        (Symbol, "Nat"),
        (Equal, "="),
        (Symbol, "fib"),
        (LParen, "("),
        (Symbol, "n"),
        (Minus, "-"),
        (NatLit, "1"),
        (RParen, ")"),
        (Plus, "+"),
        (Symbol, "fib"),
        (LParen, "("),
        (Symbol, "n"),
        (Minus, "-"),
        (NatLit, "2"),
        (RParen, ")"),
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        (Symbol, "t"),
        (Equal, "="),
        (Symbol, "if"),
        (BoolLit, "True"),
        (Colon, ":"),
        (Newline, newline),
        (Indent, " "),
        (Symbol, "then"),
        (Walrus, ":="),
        (NatLit, "1"),
        (Newline, newline),
        (Symbol, "else"),
        (Walrus, ":="),
        (NatLit, "2"),
        (Newline, newline),
        (Dedent, ""),
        (Symbol, "assert"),
        (Symbol, "t"),
        (DblEq, "=="),
        (NatLit, "1"),
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        (Symbol, "math"),
        (Equal, "="),
        (Symbol, "import"),
        (StrLit, "\"math\""),
        (Newline, newline),
        (Newline, newline),
        (LBrace, "{"),
        (Symbol, "pi"),
        (Comma, ","),
        (EllipsisLit, "..."),
        (RBrace, "}"),
        (Equal, "="),
        (Symbol, "import"),
        (StrLit, "\"math\""),
        (Newline, newline),
        (EOF, ""),
    ];
    let mut tok: Token;
    for (id, i) in token_array.into_iter().enumerate() {
        print!("{id:>03}: ");
        tok = lexer.next().unwrap().unwrap();
        assert_eq!(tok, Token::from_str(i.0, i.1));
        println!("{tok}");
    }
    Ok(())
}

#[test]
fn test_lexer_for_literals() -> ParseResult<()> {
    let mut lexer = Lexer::new(Input::File(FILE3.into()));
    let newline = "\n";
    let /*mut*/ token_array = vec![
        (Newline, newline),
        (Newline, newline),
        (NatLit, "0"),
        (Comma, ","),
        (NatLit, "1"),
        (Comma, ","),
        (NatLit, "2"),
        (Comma, ","),
        (NatLit, "100_000"),
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        (IntLit, "-1"),
        (Comma, ","),
        (IntLit, "-10"),
        (Comma, ","),
        (IntLit, "-100_000"),
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        (RatioLit, "0.00"),
        (Comma, ","),
        (RatioLit, "-0.0"),
        (Comma, ","),
        (RatioLit, ".1"),
        (Comma, ","),
        (RatioLit, "400."),
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        (StrLit, "\"\""),
        (Comma, ","),
        (StrLit, "\"a\""),
        (Comma, ","),
        (StrLit, "\"こんにちは\""),
        (Comma, ","),
        (StrLit, "\"\\\" \\\\ \""),
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        (BoolLit, "True"),
        (Comma, ","),
        (BoolLit, "False"),
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        (NoneLit, "None"),
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        (EllipsisLit, "..."),
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        (InfLit, "Inf"),
        (Comma, ","),
        (PreMinus, "-"),
        (InfLit, "Inf"),
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        (NoImplLit, "NotImplemented"),
        (Newline, newline),
        (Newline, newline),
        (Newline, newline),
        // (EOF, ""),
    ];
    let mut tok: Token;
    for (id, i) in token_array.into_iter().enumerate() {
        print!("{id:>03}: ");
        tok = lexer.next().unwrap().unwrap();
        assert_eq!(tok, Token::from_str(i.0, i.1));
        println!("{tok}");
    }
    Ok(())
}

#[test]
fn tesop_te_prec() {
    assert_eq!(Mod.precedence(), Some(170));
    assert_eq!(LParen.precedence(), Some(0));
    assert_eq!(Illegal.precedence(), None);
}
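
The three lexer tests above repeat the same compare-and-print loop. Not part of this commit, but a natural follow-up would be a shared helper; a minimal sketch, assuming only the APIs already used in this file (Lexer, Input, Token::from_str):

    fn assert_tokens(file: &'static str, expected: Vec<(TokenKind, &'static str)>) -> ParseResult<()> {
        let mut lexer = Lexer::new(Input::File(file.into()));
        for (id, (kind, text)) in expected.into_iter().enumerate() {
            // the lexer yields Result-wrapped tokens, as in the tests above
            let tok = lexer.next().unwrap().unwrap();
            print!("{id:>03}: ");
            println!("{tok}");
            assert_eq!(tok, Token::from_str(kind, text));
        }
        Ok(())
    }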