diff --git a/compiler/erg_parser/tests/parse_test.rs b/compiler/erg_parser/tests/parse_test.rs
new file mode 100644
index 00000000..36c439ad
--- /dev/null
+++ b/compiler/erg_parser/tests/parse_test.rs
@@ -0,0 +1,63 @@
+use erg_common::config::{ErgConfig, Input};
+use erg_common::error::MultiErrorDisplay;
+use erg_common::traits::Runnable;
+
+use erg_parser::error::ParserRunnerErrors;
+use erg_parser::lex::Lexer;
+use erg_parser::ParserRunner;
+
+#[test]
+fn parse_dependent() -> Result<(), ParserRunnerErrors> {
+    parse_test_from_code("tests/dependent.er")
+}
+
+#[test]
+fn parse_fib() -> Result<(), ParserRunnerErrors> {
+    parse_test_from_code("tests/fib.er")
+}
+
+#[test]
+fn parse_hello_world() -> Result<(), ParserRunnerErrors> {
+    parse_test_from_code("tests/hello_world.er")
+}
+
+#[test]
+fn parse_simple_if() -> Result<(), ParserRunnerErrors> {
+    parse_test_from_code("tests/simple_if.er")
+}
+
+#[test]
+fn parse_stack() -> Result<(), ParserRunnerErrors> {
+    parse_test_from_code("tests/stack.er")
+}
+
+#[test]
+fn parse_test1_basic_syntax() -> Result<(), ParserRunnerErrors> {
+    parse_test_from_code("tests/test1_basic_syntax.er")
+}
+
+#[test]
+fn parse_test2_advanced_syntax() -> Result<(), ParserRunnerErrors> {
+    parse_test_from_code("tests/test2_advanced_syntax.er")
+}
+
+fn parse_test_from_code(file_path: &'static str) -> Result<(), ParserRunnerErrors> {
+    let input = Input::File(file_path.into());
+    let cfg = ErgConfig::new("exec", 1, false, None, 100, input.clone(), "", 2);
+    let lexer = Lexer::new(input.clone());
+    let mut parser = ParserRunner::new(cfg);
+    match parser.parse_token_stream(
+        lexer
+            .lex()
+            .map_err(|errs| ParserRunnerErrors::convert(&input, errs))?,
+    ) {
+        Ok(module) => {
+            println!("{module}");
+            Ok(())
+        }
+        Err(e) => {
+            e.fmt_all_stderr();
+            Err(e)
+        }
+    }
+}
diff --git a/compiler/erg_parser/tests/simple_if.er b/compiler/erg_parser/tests/simple_if.er
new file mode 100644
index 00000000..3a4c28c9
--- /dev/null
+++ b/compiler/erg_parser/tests/simple_if.er
@@ -0,0 +1,20 @@
+result = if Bool.sample():
+    0
+    1
+print! result
+
+if True, () -> log "hello"
+
+if! False, () => print! "hello"
+
+if! True:
+    () => print! "hello"
+    () => print! "world"
+
+print! if False:
+    () -> "a"
+    () -> "b"
+
+print!(if(True, () -> "a", () -> "b"))
+
+# TODO: do/do!
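Note: each test function above is a thin wrapper around the shared `parse_test_from_code` helper, so wiring in another fixture is a one-liner. A minimal sketch, assuming a hypothetical `tests/record.er` fixture that is not part of this change:

    // Hypothetical extra case following the pattern above; "tests/record.er"
    // is an assumed fixture file, not one added by this diff.
    #[test]
    fn parse_record() -> Result<(), ParserRunnerErrors> {
        parse_test_from_code("tests/record.er")
    }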
diff --git a/compiler/erg_parser/tests/test.rs b/compiler/erg_parser/tests/test.rs
index 60d6fb76..cc4ea870 100644
--- a/compiler/erg_parser/tests/test.rs
+++ b/compiler/erg_parser/tests/test.rs
@@ -1,28 +1,21 @@
-extern crate erg_common;
-extern crate erg_parser;
+use std::iter::Iterator;
 
-mod tests {
-    use std::iter::Iterator;
+use erg_common::config::Input;
 
-    use erg_common::config::{ErgConfig, Input};
-    use erg_common::error::MultiErrorDisplay;
-    use erg_common::traits::Runnable;
+// use erg_compiler::parser;
 
-    // use erg_compiler::parser;
+use erg_parser::error::ParseResult;
+use erg_parser::lex::{Lexer};
+use erg_parser::token::*;
+use TokenKind::*;
 
-    use erg_parser::error::*;
-    use erg_parser::lex::Lexer;
-    use erg_parser::token::*;
-    use erg_parser::ParserRunner;
-    use TokenKind::*;
+const FILE1: &str = "tests/test1_basic_syntax.er";
 
-    const FILE1: &str = "src/compiler/parser/tests/test1_basic_syntax.er";
-
-    #[test]
-    fn test_lexer() -> ParseResult<()> {
-        let mut lexer = Lexer::new(Input::File(FILE1.into()));
-        let newline = "\n";
-        let /*mut*/ token_array = vec![
+#[test]
+fn test_lexer() -> ParseResult<()> {
+    let mut lexer = Lexer::new(Input::File(FILE1.into()));
+    let newline = "\n";
+    let /*mut*/ token_array = vec![
         (Symbol, "_a"),
         (Equal, "="),
         (IntLit, "1234"),
@@ -108,41 +101,18 @@ mod tests {
         (EOF, "EOF"),
         ];
-        let mut tok: Token;
-        for i in token_array.into_iter() {
-            tok = lexer.next().unwrap().unwrap();
-            assert_eq!(tok, Token::from_str(i.0, i.1));
-            println!("{tok}");
-        }
-        Ok(())
-    }
-
-    #[test]
-    fn tesop_te_prec() {
-        assert_eq!(Mod.precedence(), Some(160));
-        assert_eq!(LParen.precedence(), Some(0));
-        assert_eq!(Illegal.precedence(), None);
-    }
-
-    #[test]
-    fn test_parser1() -> Result<(), ParserRunnerErrors> {
-        let input = Input::File(FILE1.into());
-        let cfg = ErgConfig::new("exec", 1, false, None, 100, input.clone(), "", 2);
-        let lexer = Lexer::new(input.clone());
-        let mut parser = ParserRunner::new(cfg);
-        match parser.parse_token_stream(
-            lexer
-                .lex()
-                .map_err(|errs| ParserRunnerErrors::convert(&input, errs))?,
-        ) {
-            Ok(module) => {
-                println!("{module}");
-                Ok(())
-            }
-            Err(e) => {
-                e.fmt_all_stderr();
-                Err(e)
-            }
-        }
+    let mut tok: Token;
+    for i in token_array.into_iter() {
+        tok = lexer.next().unwrap().unwrap();
+        assert_eq!(tok, Token::from_str(i.0, i.1));
+        println!("{tok}");
     }
+    Ok(())
+}
+
+#[test]
+fn tesop_te_prec() {
+    assert_eq!(Mod.precedence(), Some(160));
+    assert_eq!(LParen.precedence(), Some(0));
+    assert_eq!(Illegal.precedence(), None);
 }
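Note: the lexing loop kept in `test_lexer` doubles as the general way to walk a token stream with this API. A minimal standalone sketch, assuming only the items already imported in this diff (`Lexer`, `Input`, `Token`, `TokenKind`) and that `lexer.next()` yields `Option<Result<Token, _>>`, as the test's double `unwrap` implies:

    use erg_common::config::Input;
    use erg_parser::lex::Lexer;
    use erg_parser::token::*;

    // Lex a fixture and print every token up to EOF, mirroring test_lexer's
    // next().unwrap() pattern; the fixture path is one of the files added above.
    fn dump_tokens() {
        let mut lexer = Lexer::new(Input::File("tests/simple_if.er".into()));
        while let Some(res) = lexer.next() {
            let tok = res.unwrap(); // panic on a lex error, as the test does
            println!("{tok}");
            // Token equality against Token::from_str is how the test compares tokens.
            if tok == Token::from_str(TokenKind::EOF, "EOF") {
                break;
            }
        }
    }

These integration tests can be run with cargo from the erg_parser crate, e.g. `cargo test --test parse_test` and `cargo test --test test`, assuming the default convention that test-target names follow the file names.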