Move tests from parser.rs to appropriate parse_XX tests (#845)

* Move tests from parser.rs to appropriate parse_XX tests

* move to postgres tests

* move more tests
Andrew Lamb 2023-04-10 09:48:23 -04:00 committed by GitHub
parent 784a19138f
commit 00d071286b
4 changed files with 79 additions and 119 deletions
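
The relocated tests also switch style: instead of driving the parser directly (run_parser_method plus parse_query, as in the deleted hunks below), they go through the shared TestedDialects helpers. A minimal sketch of that pattern, assuming the crate's doc-hidden test_utils module is importable the way the integration tests import it; the test name and the OFFSET input here are illustrative, not part of this commit:

use sqlparser::dialect::{GenericDialect, MySqlDialect, PostgreSqlDialect};
use sqlparser::test_utils::TestedDialects;

#[test]
fn round_trip_limit_example() {
    // Bundle the dialects the statement should parse under.
    let dialects = TestedDialects {
        dialects: vec![
            Box::new(PostgreSqlDialect {}),
            Box::new(GenericDialect {}),
            Box::new(MySqlDialect {}),
        ],
    };
    // Exact round trip: parse with each dialect and assert the AST
    // serializes back to the input string unchanged.
    dialects.verified_stmt("SELECT * FROM user LIMIT 1");
    // Non-canonical input: the second argument is the expected
    // canonical serialization (extra whitespace is normalized away).
    dialects.one_statement_parses_to(
        "SELECT * FROM user LIMIT 1 OFFSET  2",
        "SELECT * FROM user LIMIT 1 OFFSET 2",
    );
}

verified_stmt asserts an exact parse/serialize round trip, while one_statement_parses_to accepts non-canonical input and checks it against an expected canonical form, which is what the moved UPDATE test below relies on.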

View file

@@ -6912,40 +6912,6 @@ mod tests {
         });
     }
 
-    #[test]
-    fn test_parse_limit() {
-        let sql = "SELECT * FROM user LIMIT 1";
-        all_dialects().run_parser_method(sql, |parser| {
-            let ast = parser.parse_query().unwrap();
-            assert_eq!(ast.to_string(), sql.to_string());
-        });
-
-        let sql = "SELECT * FROM user LIMIT $1 OFFSET $2";
-        let dialects = TestedDialects {
-            dialects: vec![
-                Box::new(PostgreSqlDialect {}),
-                Box::new(ClickHouseDialect {}),
-                Box::new(GenericDialect {}),
-                Box::new(MsSqlDialect {}),
-                Box::new(SnowflakeDialect {}),
-            ],
-        };
-        dialects.run_parser_method(sql, |parser| {
-            let ast = parser.parse_query().unwrap();
-            assert_eq!(ast.to_string(), sql.to_string());
-        });
-
-        let sql = "SELECT * FROM user LIMIT ? OFFSET ?";
-        let dialects = TestedDialects {
-            dialects: vec![Box::new(MySqlDialect {})],
-        };
-        dialects.run_parser_method(sql, |parser| {
-            let ast = parser.parse_query().unwrap();
-            assert_eq!(ast.to_string(), sql.to_string());
-        });
-    }
-
     #[cfg(test)]
     mod test_parse_data_type {
         use crate::ast::{
@@ -7402,24 +7368,6 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_update_has_keyword() {
-        let sql = r#"UPDATE test SET name=$1,
-value=$2,
-where=$3,
-create=$4,
-is_default=$5,
-classification=$6,
-sort=$7
-WHERE id=$8"#;
-        let pg_dialect = PostgreSqlDialect {};
-        let ast = Parser::parse_sql(&pg_dialect, sql).unwrap();
-        assert_eq!(
-            ast[0].to_string(),
-            r#"UPDATE test SET name = $1, value = $2, where = $3, create = $4, is_default = $5, classification = $6, sort = $7 WHERE id = $8"#
-        );
-    }
-
     #[test]
     fn test_tokenizer_error_loc() {
         let sql = "foo '";
@@ -7458,11 +7406,4 @@ mod tests {
             ))
         );
     }
-
-    #[test]
-    fn test_update_in_with_subquery() {
-        let sql = r#"WITH "result" AS (UPDATE "Hero" SET "name" = 'Captain America', "number_of_movies" = "number_of_movies" + 1 WHERE "secret_identity" = 'Sam Wilson' RETURNING "id", "name", "secret_identity", "number_of_movies") SELECT * FROM "result""#;
-        let ast = Parser::parse_sql(&GenericDialect, sql).unwrap();
-        assert_eq!(ast[0].to_string(), sql);
-    }
 }

View file

@@ -513,6 +513,11 @@ fn parse_simple_select() {
     assert_eq!(Some(Expr::Value(number("5"))), select.limit);
 }
 
+#[test]
+fn parse_limit() {
+    verified_stmt("SELECT * FROM user LIMIT 1");
+}
+
 #[test]
 fn parse_limit_is_not_an_alias() {
     // In dialects supporting LIMIT it shouldn't be parsed as a table alias
@@ -1559,65 +1564,6 @@ fn parse_select_group_by() {
     );
 }
 
-#[test]
-fn parse_select_group_by_grouping_sets() {
-    let dialects = TestedDialects {
-        dialects: vec![Box::new(GenericDialect {}), Box::new(PostgreSqlDialect {})],
-    };
-    let sql =
-        "SELECT brand, size, sum(sales) FROM items_sold GROUP BY size, GROUPING SETS ((brand), (size), ())";
-    let select = dialects.verified_only_select(sql);
-    assert_eq!(
-        vec![
-            Expr::Identifier(Ident::new("size")),
-            Expr::GroupingSets(vec![
-                vec![Expr::Identifier(Ident::new("brand"))],
-                vec![Expr::Identifier(Ident::new("size"))],
-                vec![],
-            ]),
-        ],
-        select.group_by
-    );
-}
-
-#[test]
-fn parse_select_group_by_rollup() {
-    let dialects = TestedDialects {
-        dialects: vec![Box::new(GenericDialect {}), Box::new(PostgreSqlDialect {})],
-    };
-    let sql = "SELECT brand, size, sum(sales) FROM items_sold GROUP BY size, ROLLUP (brand, size)";
-    let select = dialects.verified_only_select(sql);
-    assert_eq!(
-        vec![
-            Expr::Identifier(Ident::new("size")),
-            Expr::Rollup(vec![
-                vec![Expr::Identifier(Ident::new("brand"))],
-                vec![Expr::Identifier(Ident::new("size"))],
-            ]),
-        ],
-        select.group_by
-    );
-}
-
-#[test]
-fn parse_select_group_by_cube() {
-    let dialects = TestedDialects {
-        dialects: vec![Box::new(GenericDialect {}), Box::new(PostgreSqlDialect {})],
-    };
-    let sql = "SELECT brand, size, sum(sales) FROM items_sold GROUP BY size, CUBE (brand, size)";
-    let select = dialects.verified_only_select(sql);
-    assert_eq!(
-        vec![
-            Expr::Identifier(Ident::new("size")),
-            Expr::Cube(vec![
-                vec![Expr::Identifier(Ident::new("brand"))],
-                vec![Expr::Identifier(Ident::new("size"))],
-            ]),
-        ],
-        select.group_by
-    );
-}
-
 #[test]
 fn parse_select_having() {
     let sql = "SELECT foo FROM bar GROUP BY foo HAVING COUNT(*) > 1";

View file

@@ -1134,6 +1134,7 @@ fn parse_limit_my_sql_syntax() {
         "SELECT id, fname, lname FROM customer LIMIT 5, 10",
         "SELECT id, fname, lname FROM customer LIMIT 10 OFFSET 5",
     );
+    mysql_and_generic().verified_stmt("SELECT * FROM user LIMIT ? OFFSET ?");
 }
 
 #[test]

View file

@@ -2427,7 +2427,27 @@ fn parse_delimited_identifiers() {
     pg().verified_stmt(r#"CREATE TABLE "foo" ("bar" "int")"#);
     pg().verified_stmt(r#"ALTER TABLE foo ADD CONSTRAINT "bar" PRIMARY KEY (baz)"#);
-    //TODO verified_stmt(r#"UPDATE foo SET "bar" = 5"#);
+    pg().verified_stmt(r#"UPDATE foo SET "bar" = 5"#);
+}
+
+#[test]
+fn parse_update_has_keyword() {
+    pg().one_statement_parses_to(
+        r#"UPDATE test SET name=$1,
+value=$2,
+where=$3,
+create=$4,
+is_default=$5,
+classification=$6,
+sort=$7
+WHERE id=$8"#,
+        r#"UPDATE test SET name = $1, value = $2, where = $3, create = $4, is_default = $5, classification = $6, sort = $7 WHERE id = $8"#
+    );
+}
+
+#[test]
+fn parse_update_in_with_subquery() {
+    pg_and_generic().verified_stmt(r#"WITH "result" AS (UPDATE "Hero" SET "name" = 'Captain America', "number_of_movies" = "number_of_movies" + 1 WHERE "secret_identity" = 'Sam Wilson' RETURNING "id", "name", "secret_identity", "number_of_movies") SELECT * FROM "result""#);
 }
 
 #[test]
@@ -2789,3 +2809,55 @@ fn parse_incorrect_dollar_quoted_string() {
     let sql = "SELECT $$$";
     assert!(pg().parse_sql_statements(sql).is_err());
 }
+
+#[test]
+fn parse_select_group_by_grouping_sets() {
+    let select = pg_and_generic().verified_only_select(
+        "SELECT brand, size, sum(sales) FROM items_sold GROUP BY size, GROUPING SETS ((brand), (size), ())"
+    );
+    assert_eq!(
+        vec![
+            Expr::Identifier(Ident::new("size")),
+            Expr::GroupingSets(vec![
+                vec![Expr::Identifier(Ident::new("brand"))],
+                vec![Expr::Identifier(Ident::new("size"))],
+                vec![],
+            ]),
+        ],
+        select.group_by
+    );
+}
+
+#[test]
+fn parse_select_group_by_rollup() {
+    let select = pg_and_generic().verified_only_select(
+        "SELECT brand, size, sum(sales) FROM items_sold GROUP BY size, ROLLUP (brand, size)",
+    );
+    assert_eq!(
+        vec![
+            Expr::Identifier(Ident::new("size")),
+            Expr::Rollup(vec![
+                vec![Expr::Identifier(Ident::new("brand"))],
+                vec![Expr::Identifier(Ident::new("size"))],
+            ]),
+        ],
+        select.group_by
+    );
+}
+
+#[test]
+fn parse_select_group_by_cube() {
+    let select = pg_and_generic().verified_only_select(
+        "SELECT brand, size, sum(sales) FROM items_sold GROUP BY size, CUBE (brand, size)",
+    );
+    assert_eq!(
+        vec![
+            Expr::Identifier(Ident::new("size")),
+            Expr::Cube(vec![
+                vec![Expr::Identifier(Ident::new("brand"))],
+                vec![Expr::Identifier(Ident::new("size"))],
+            ]),
+        ],
+        select.group_by
+    );
+}
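
The one_statement_parses_to call used for the relocated UPDATE test above amounts to the same check the deleted inline test performed directly against the public Parser API. A sketch of that equivalence, assuming only Parser::parse_sql and Display on Statement; the helper and test names are hypothetical, and the UPDATE input is shortened for illustration:

use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

// Hypothetical stand-in for the one_statement_parses_to helper:
// parse, require a single statement, and compare its canonical form.
fn assert_parses_to(sql: &str, canonical: &str) {
    let ast = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    assert_eq!(ast.len(), 1);
    assert_eq!(ast[0].to_string(), canonical);
}

#[test]
fn update_assignments_are_canonicalized() {
    // Spacing around `=` is normalized in the serialized output.
    assert_parses_to(
        "UPDATE test SET name=$1, value=$2 WHERE id=$8",
        "UPDATE test SET name = $1, value = $2 WHERE id = $8",
    );
}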