Support some of the pipe operators (#1759)

Simon Vandel Sillesen 2025-05-02 05:13:47 +02:00 committed by GitHub
parent a5b9821d1d
commit e5d2215267
12 changed files with 427 additions and 21 deletions

@ -66,23 +66,23 @@ pub use self::dml::{CreateIndex, CreateTable, Delete, IndexColumn, Insert};
pub use self::operator::{BinaryOperator, UnaryOperator};
pub use self::query::{
AfterMatchSkip, ConnectBy, Cte, CteAsMaterialized, Distinct, EmptyMatchesMode,
ExceptSelectItem, ExcludeSelectItem, ExprWithAlias, Fetch, ForClause, ForJson, ForXml,
FormatClause, GroupByExpr, GroupByWithModifier, IdentWithAlias, IlikeSelectItem,
InputFormatClause, Interpolate, InterpolateExpr, Join, JoinConstraint, JoinOperator,
JsonTableColumn, JsonTableColumnErrorHandling, JsonTableNamedColumn, JsonTableNestedColumn,
LateralView, LimitClause, LockClause, LockType, MatchRecognizePattern, MatchRecognizeSymbol,
Measure, NamedWindowDefinition, NamedWindowExpr, NonBlock, Offset, OffsetRows,
OpenJsonTableColumn, OrderBy, OrderByExpr, OrderByKind, OrderByOptions, PivotValueSource,
ProjectionSelect, Query, RenameSelectItem, RepetitionQuantifier, ReplaceSelectElement,
ReplaceSelectItem, RowsPerMatch, Select, SelectFlavor, SelectInto, SelectItem,
SelectItemQualifiedWildcardKind, SetExpr, SetOperator, SetQuantifier, Setting,
SymbolDefinition, Table, TableAlias, TableAliasColumnDef, TableFactor, TableFunctionArgs,
TableIndexHintForClause, TableIndexHintType, TableIndexHints, TableIndexType, TableSample,
TableSampleBucket, TableSampleKind, TableSampleMethod, TableSampleModifier,
TableSampleQuantity, TableSampleSeed, TableSampleSeedModifier, TableSampleUnit, TableVersion,
TableWithJoins, Top, TopQuantity, UpdateTableFromKind, ValueTableMode, Values,
WildcardAdditionalOptions, With, WithFill, XmlNamespaceDefinition, XmlPassingArgument,
XmlPassingClause, XmlTableColumn, XmlTableColumnOption,
ExceptSelectItem, ExcludeSelectItem, ExprWithAlias, ExprWithAliasAndOrderBy, Fetch, ForClause,
ForJson, ForXml, FormatClause, GroupByExpr, GroupByWithModifier, IdentWithAlias,
IlikeSelectItem, InputFormatClause, Interpolate, InterpolateExpr, Join, JoinConstraint,
JoinOperator, JsonTableColumn, JsonTableColumnErrorHandling, JsonTableNamedColumn,
JsonTableNestedColumn, LateralView, LimitClause, LockClause, LockType, MatchRecognizePattern,
MatchRecognizeSymbol, Measure, NamedWindowDefinition, NamedWindowExpr, NonBlock, Offset,
OffsetRows, OpenJsonTableColumn, OrderBy, OrderByExpr, OrderByKind, OrderByOptions,
PipeOperator, PivotValueSource, ProjectionSelect, Query, RenameSelectItem,
RepetitionQuantifier, ReplaceSelectElement, ReplaceSelectItem, RowsPerMatch, Select,
SelectFlavor, SelectInto, SelectItem, SelectItemQualifiedWildcardKind, SetExpr, SetOperator,
SetQuantifier, Setting, SymbolDefinition, Table, TableAlias, TableAliasColumnDef, TableFactor,
TableFunctionArgs, TableIndexHintForClause, TableIndexHintType, TableIndexHints,
TableIndexType, TableSample, TableSampleBucket, TableSampleKind, TableSampleMethod,
TableSampleModifier, TableSampleQuantity, TableSampleSeed, TableSampleSeedModifier,
TableSampleUnit, TableVersion, TableWithJoins, Top, TopQuantity, UpdateTableFromKind,
ValueTableMode, Values, WildcardAdditionalOptions, With, WithFill, XmlNamespaceDefinition,
XmlPassingArgument, XmlPassingClause, XmlTableColumn, XmlTableColumnOption,
};
pub use self::trigger::{

@ -62,6 +62,9 @@ pub struct Query {
/// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/select/format)
/// (ClickHouse-specific)
pub format_clause: Option<FormatClause>,
/// Pipe operators applied to the query, in order, e.g. `|> WHERE x > 0 |> LIMIT 10`
pub pipe_operators: Vec<PipeOperator>,
}
impl fmt::Display for Query {
@ -92,6 +95,9 @@ impl fmt::Display for Query {
if let Some(ref format) = self.format_clause {
write!(f, " {}", format)?;
}
for pipe_operator in &self.pipe_operators {
write!(f, " |> {}", pipe_operator)?;
}
Ok(())
}
}
@ -1004,6 +1010,26 @@ impl fmt::Display for ExprWithAlias {
}
}
/// An expression optionally followed by an alias and order by options.
///
/// Example:
/// ```sql
/// 42 AS myint ASC
/// ```
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct ExprWithAliasAndOrderBy {
pub expr: ExprWithAlias,
pub order_by: OrderByOptions,
}
impl fmt::Display for ExprWithAliasAndOrderBy {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}{}", self.expr, self.order_by)
}
}
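
As an aside (not part of this diff), a minimal round-trip sketch for the new struct, assuming the public `parse_expr_with_alias_and_order_by` helper added further down in this commit; `BigQueryDialect` is used here only because it enables the feature:

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    let dialect = BigQueryDialect {};
    // Parse an expression with an alias and a trailing ASC/DESC, as used by `|> AGGREGATE`.
    let mut parser = Parser::new(&dialect).try_with_sql("SUM(x) AS total DESC")?;
    let item = parser.parse_expr_with_alias_and_order_by()?;
    // The Display impl above renders the alias and order options back verbatim.
    assert_eq!(item.to_string(), "SUM(x) AS total DESC");
    Ok(())
}
```
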
/// Arguments to a table-valued function
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
@ -2513,6 +2539,135 @@ impl fmt::Display for OffsetRows {
}
}
/// Pipe syntax, first introduced in Google BigQuery.
/// Example:
///
/// ```sql
/// FROM Produce
/// |> WHERE sales > 0
/// |> AGGREGATE SUM(sales) AS total_sales, COUNT(*) AS num_sales
/// GROUP BY item;
/// ```
///
/// See <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#pipe_syntax>
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum PipeOperator {
/// Limits the number of rows to return in a query, with an optional OFFSET clause to skip over rows.
///
/// Syntax: `|> LIMIT <n> [OFFSET <m>]`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#limit_pipe_operator>
Limit { expr: Expr, offset: Option<Expr> },
/// Filters the results of the input table.
///
/// Syntax: `|> WHERE <condition>`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#where_pipe_operator>
Where { expr: Expr },
/// Orders the results of the input table, similar to the `ORDER BY` clause in standard syntax.
///
/// Syntax: `|> ORDER BY <expr> [ASC|DESC], ...`
OrderBy { exprs: Vec<OrderByExpr> },
/// Produces a new table with the listed columns, similar to the outermost SELECT clause in a table subquery in standard syntax.
///
/// Syntax: `|> SELECT <expr> [[AS] alias], ...`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#select_pipe_operator>
Select { exprs: Vec<SelectItem> },
/// Propagates the existing table and adds computed columns, similar to SELECT *, new_column in standard syntax.
///
/// Syntax: `|> EXTEND <expr> [[AS] alias], ...`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#extend_pipe_operator>
Extend { exprs: Vec<SelectItem> },
/// Replaces the value of a column in the current table, similar to SELECT * REPLACE (expression AS column) in standard syntax.
///
/// Syntax: `|> SET <column> = <expression>, ...`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#set_pipe_operator>
Set { assignments: Vec<Assignment> },
/// Removes listed columns from the current table, similar to SELECT * EXCEPT (column) in standard syntax.
///
/// Syntax: `|> DROP <column>, ...`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#drop_pipe_operator>
Drop { columns: Vec<Ident> },
/// Introduces a table alias for the input table, similar to applying the AS alias clause on a table subquery in standard syntax.
///
/// Syntax: `|> AS <alias>`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#as_pipe_operator>
As { alias: Ident },
/// Performs aggregation on data across grouped rows or an entire table.
///
/// Full-table syntax: `|> AGGREGATE <agg_expr> [[AS] alias], ...`
///
/// Grouped syntax:
/// ```norust
/// |> AGGREGATE [<agg_expr> [[AS] alias], ...]
/// GROUP BY <grouping_expr> [AS alias], ...
/// ```
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#aggregate_pipe_operator>
Aggregate {
full_table_exprs: Vec<ExprWithAliasAndOrderBy>,
group_by_expr: Vec<ExprWithAliasAndOrderBy>,
},
}
impl fmt::Display for PipeOperator {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
PipeOperator::Select { exprs } => {
write!(f, "SELECT {}", display_comma_separated(exprs.as_slice()))
}
PipeOperator::Extend { exprs } => {
write!(f, "EXTEND {}", display_comma_separated(exprs.as_slice()))
}
PipeOperator::Set { assignments } => {
write!(f, "SET {}", display_comma_separated(assignments.as_slice()))
}
PipeOperator::Drop { columns } => {
write!(f, "DROP {}", display_comma_separated(columns.as_slice()))
}
PipeOperator::As { alias } => {
write!(f, "AS {}", alias)
}
PipeOperator::Limit { expr, offset } => {
write!(f, "LIMIT {}", expr)?;
if let Some(offset) = offset {
write!(f, " OFFSET {}", offset)?;
}
Ok(())
}
PipeOperator::Aggregate {
full_table_exprs,
group_by_expr,
} => {
write!(f, "AGGREGATE")?;
if !full_table_exprs.is_empty() {
write!(
f,
" {}",
display_comma_separated(full_table_exprs.as_slice())
)?;
}
if !group_by_expr.is_empty() {
write!(f, " GROUP BY {}", display_comma_separated(group_by_expr))?;
}
Ok(())
}
PipeOperator::Where { expr } => {
write!(f, "WHERE {}", expr)
}
PipeOperator::OrderBy { exprs } => {
write!(f, "ORDER BY {}", display_comma_separated(exprs.as_slice()))
}
}
}
}
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]

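Taken together, a small usage sketch (mine, not from this commit) of the new `pipe_operators` field: parse a pipe query through the crate's public `Parser` API, walk the stages, and confirm the ` |> ` round trip implemented above:

```rust
use sqlparser::ast::{PipeOperator, Statement};
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT * FROM users |> WHERE id = 1 |> LIMIT 10 OFFSET 5";
    let stmt = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap().remove(0);
    if let Statement::Query(query) = stmt {
        // Each trailing `|> ...` stage is stored, in order, in `query.pipe_operators`.
        for op in &query.pipe_operators {
            match op {
                PipeOperator::Where { expr } => println!("filter: {expr}"),
                PipeOperator::Limit { expr, offset } => {
                    println!("limit: {expr}, offset: {offset:?}")
                }
                other => println!("other stage: {other}"),
            }
        }
        // Display writes the stages back as ` |> WHERE ...`, ` |> LIMIT ...`, etc.
        assert_eq!(query.to_string(), sql);
    }
}
```
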
@ -98,10 +98,11 @@ impl Spanned for Query {
order_by,
limit_clause,
fetch,
locks: _, // todo
for_clause: _, // todo, mssql specific
settings: _, // todo, clickhouse specific
format_clause: _, // todo, clickhouse specific
locks: _, // todo
for_clause: _, // todo, mssql specific
settings: _, // todo, clickhouse specific
format_clause: _, // todo, clickhouse specific
pipe_operators: _, // todo bigquery specific
} = self;
union_spans(

@ -136,6 +136,10 @@ impl Dialect for BigQueryDialect {
fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool {
!RESERVED_FOR_COLUMN_ALIAS.contains(kw)
}
fn supports_pipe_operator(&self) -> bool {
true
}
}
impl BigQueryDialect {

@ -518,6 +518,20 @@ pub trait Dialect: Debug + Any {
false
}
/// Return true if the dialect supports pipe operator.
///
/// Example:
/// ```sql
/// SELECT *
/// FROM table
/// |> LIMIT 1
/// ```
///
/// See <https://cloud.google.com/bigquery/docs/pipe-syntax-guide#basic_syntax>
fn supports_pipe_operator(&self) -> bool {
false
}
/// Does the dialect support MySQL-style `'user'@'host'` grantee syntax?
fn supports_user_host_grantee(&self) -> bool {
false

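For a dialect outside this crate, opting in could look like the following sketch (hypothetical `MyPipeDialect`, not part of this change); only the trait's two required identifier hooks plus the new method are overridden:

```rust
use sqlparser::dialect::Dialect;
use sqlparser::parser::Parser;

/// Hypothetical dialect that opts in to the `|>` pipe syntax.
#[derive(Debug)]
struct MyPipeDialect;

impl Dialect for MyPipeDialect {
    // Required by the trait: which characters may start/continue an identifier.
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_alphabetic() || ch == '_'
    }
    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_ascii_alphanumeric() || ch == '_'
    }
    // New in this change: enables both the `|>` token and pipe-operator parsing.
    fn supports_pipe_operator(&self) -> bool {
        true
    }
}

fn main() {
    // Queries parsed with this dialect may now carry `|> ...` stages.
    let _ = Parser::parse_sql(&MyPipeDialect, "SELECT * FROM t |> LIMIT 1");
}
```
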
@ -83,6 +83,7 @@ define_keywords!(
ADMIN,
AFTER,
AGAINST,
AGGREGATE,
AGGREGATION,
ALERT,
ALGORITHM,
@ -338,6 +339,7 @@ define_keywords!(
EXPLAIN,
EXPLICIT,
EXPORT,
EXTEND,
EXTENDED,
EXTENSION,
EXTERNAL,

@ -1149,6 +1149,25 @@ impl<'a> Parser<'a> {
self.parse_subexpr(self.dialect.prec_unknown())
}
pub fn parse_expr_with_alias_and_order_by(
&mut self,
) -> Result<ExprWithAliasAndOrderBy, ParserError> {
let expr = self.parse_expr()?;
fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
}
let alias = self.parse_optional_alias_inner(None, validator)?;
let order_by = OrderByOptions {
asc: self.parse_asc_desc(),
nulls_first: None,
};
Ok(ExprWithAliasAndOrderBy {
expr: ExprWithAlias { expr, alias },
order_by,
})
}
/// Parse tokens until the precedence changes.
pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
let _guard = self.recursion_counter.try_decrease()?;
@ -10571,6 +10590,7 @@ impl<'a> Parser<'a> {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}
.into())
} else if self.parse_keyword(Keyword::UPDATE) {
@ -10584,6 +10604,7 @@ impl<'a> Parser<'a> {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}
.into())
} else if self.parse_keyword(Keyword::DELETE) {
@ -10597,6 +10618,7 @@ impl<'a> Parser<'a> {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}
.into())
} else {
@ -10637,6 +10659,12 @@ impl<'a> Parser<'a> {
None
};
let pipe_operators = if self.dialect.supports_pipe_operator() {
self.parse_pipe_operators()?
} else {
Vec::new()
};
Ok(Query {
with,
body,
@ -10647,11 +10675,98 @@ impl<'a> Parser<'a> {
for_clause,
settings,
format_clause,
pipe_operators,
}
.into())
}
}
fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
let mut pipe_operators = Vec::new();
while self.consume_token(&Token::VerticalBarRightAngleBracket) {
let kw = self.expect_one_of_keywords(&[
Keyword::SELECT,
Keyword::EXTEND,
Keyword::SET,
Keyword::DROP,
Keyword::AS,
Keyword::WHERE,
Keyword::LIMIT,
Keyword::AGGREGATE,
Keyword::ORDER,
])?;
match kw {
Keyword::SELECT => {
let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
pipe_operators.push(PipeOperator::Select { exprs })
}
Keyword::EXTEND => {
let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
pipe_operators.push(PipeOperator::Extend { exprs })
}
Keyword::SET => {
let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
pipe_operators.push(PipeOperator::Set { assignments })
}
Keyword::DROP => {
let columns = self.parse_identifiers()?;
pipe_operators.push(PipeOperator::Drop { columns })
}
Keyword::AS => {
let alias = self.parse_identifier()?;
pipe_operators.push(PipeOperator::As { alias })
}
Keyword::WHERE => {
let expr = self.parse_expr()?;
pipe_operators.push(PipeOperator::Where { expr })
}
Keyword::LIMIT => {
let expr = self.parse_expr()?;
let offset = if self.parse_keyword(Keyword::OFFSET) {
Some(self.parse_expr()?)
} else {
None
};
pipe_operators.push(PipeOperator::Limit { expr, offset })
}
Keyword::AGGREGATE => {
let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
vec![]
} else {
self.parse_comma_separated(|parser| {
parser.parse_expr_with_alias_and_order_by()
})?
};
let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
self.parse_comma_separated(|parser| {
parser.parse_expr_with_alias_and_order_by()
})?
} else {
vec![]
};
pipe_operators.push(PipeOperator::Aggregate {
full_table_exprs,
group_by_expr,
})
}
Keyword::ORDER => {
self.expect_one_of_keywords(&[Keyword::BY])?;
let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
pipe_operators.push(PipeOperator::OrderBy { exprs })
}
unhandled => {
return Err(ParserError::ParserError(format!(
"`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
)))
}
}
}
Ok(pipe_operators)
}
fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
&& self.parse_keyword(Keyword::SETTINGS)
@ -12122,6 +12237,7 @@ impl<'a> Parser<'a> {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}),
alias,
})

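A sketch of what the `AGGREGATE` branch of `parse_pipe_operators` yields (the example query and assertions are mine, not from the diff): the full-table aggregate list and the `GROUP BY` list both land on `PipeOperator::Aggregate`:

```rust
use sqlparser::ast::{PipeOperator, Statement};
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT * FROM orders |> AGGREGATE SUM(price) AS total GROUP BY state DESC";
    let stmt = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap().remove(0);
    let Statement::Query(query) = stmt else {
        unreachable!("a SELECT always parses to Statement::Query")
    };
    match &query.pipe_operators[0] {
        PipeOperator::Aggregate {
            full_table_exprs,
            group_by_expr,
        } => {
            assert_eq!(full_table_exprs.len(), 1); // SUM(price) AS total
            assert_eq!(group_by_expr.len(), 1); // state DESC
        }
        other => panic!("unexpected pipe stage: {other}"),
    }
}
```
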
@ -246,6 +246,8 @@ pub enum Token {
ShiftLeftVerticalBar,
/// `|>>` PostgreSQL/Redshift geometrical binary operator (Is strictly above?)
VerticalBarShiftRight,
/// `|>` BigQuery pipe operator
VerticalBarRightAngleBracket,
/// `#>>`, extracts JSON sub-object at the specified path as text
HashLongArrow,
/// jsonb @> jsonb -> boolean: Test whether left json contains the right json
@ -359,6 +361,7 @@ impl fmt::Display for Token {
Token::AmpersandRightAngleBracket => f.write_str("&>"),
Token::AmpersandLeftAngleBracketVerticalBar => f.write_str("&<|"),
Token::VerticalBarAmpersandRightAngleBracket => f.write_str("|&>"),
Token::VerticalBarRightAngleBracket => f.write_str("|>"),
Token::TwoWayArrow => f.write_str("<->"),
Token::LeftAngleBracketCaret => f.write_str("<^"),
Token::RightAngleBracketCaret => f.write_str(">^"),
@ -1403,6 +1406,9 @@ impl<'a> Tokenizer<'a> {
_ => self.start_binop_opt(chars, "|>", None),
}
}
Some('>') if self.dialect.supports_pipe_operator() => {
self.consume_for_binop(chars, "|>", Token::VerticalBarRightAngleBracket)
}
// Bitshift '|' operator
_ => self.start_binop(chars, "|", Token::Pipe),
}

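At the lexer level the new token only appears when the active dialect opts in; a quick sketch (not from the diff):

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::tokenizer::{Token, Tokenizer};

fn main() {
    let dialect = BigQueryDialect {};
    let tokens = Tokenizer::new(&dialect, "FROM t |> LIMIT 1")
        .tokenize()
        .unwrap();
    // With `supports_pipe_operator()` returning true, `|>` is lexed as a single token;
    // otherwise the tokenizer keeps the existing `|`-prefixed operator handling.
    assert!(tokens.contains(&Token::VerticalBarRightAngleBracket));
}
```
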
@ -489,6 +489,7 @@ fn parse_update_set_from() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}),
alias: Some(TableAlias {
name: Ident::new("t2"),
@ -4310,6 +4311,7 @@ fn parse_create_table_as_table() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
});
match verified_stmt(sql1) {
@ -4335,6 +4337,7 @@ fn parse_create_table_as_table() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
});
match verified_stmt(sql2) {
@ -6332,6 +6335,7 @@ fn parse_interval_and_or_xor() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}))];
assert_eq!(actual_ast, expected_ast);
@ -9467,6 +9471,7 @@ fn parse_merge() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}),
alias: Some(TableAlias {
name: Ident {
@ -11344,6 +11349,7 @@ fn parse_unload() {
order_by: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}),
to: Ident {
value: "s3://...".to_string(),
@ -12564,6 +12570,7 @@ fn test_extract_seconds_ok() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}))];
assert_eq!(actual_ast, expected_ast);
@ -14641,6 +14648,7 @@ fn test_select_from_first() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
};
assert_eq!(expected, ast);
assert_eq!(ast.to_string(), q);
@ -15020,6 +15028,82 @@ fn parse_set_names() {
dialects.verified_stmt("SET NAMES UTF8 COLLATE bogus");
}
#[test]
fn parse_pipeline_operator() {
let dialects = all_dialects_where(|d| d.supports_pipe_operator());
// select pipe operator
dialects.verified_stmt("SELECT * FROM users |> SELECT id");
dialects.verified_stmt("SELECT * FROM users |> SELECT id, name");
dialects.verified_query_with_canonical(
"SELECT * FROM users |> SELECT id user_id",
"SELECT * FROM users |> SELECT id AS user_id",
);
dialects.verified_stmt("SELECT * FROM users |> SELECT id AS user_id");
// extend pipe operator
dialects.verified_stmt("SELECT * FROM users |> EXTEND id + 1 AS new_id");
dialects.verified_stmt("SELECT * FROM users |> EXTEND id AS new_id, name AS new_name");
dialects.verified_query_with_canonical(
"SELECT * FROM users |> EXTEND id user_id",
"SELECT * FROM users |> EXTEND id AS user_id",
);
// set pipe operator
dialects.verified_stmt("SELECT * FROM users |> SET id = id + 1");
dialects.verified_stmt("SELECT * FROM users |> SET id = id + 1, name = name + ' Doe'");
// drop pipe operator
dialects.verified_stmt("SELECT * FROM users |> DROP id");
dialects.verified_stmt("SELECT * FROM users |> DROP id, name");
// as pipe operator
dialects.verified_stmt("SELECT * FROM users |> AS new_users");
// limit pipe operator
dialects.verified_stmt("SELECT * FROM users |> LIMIT 10");
dialects.verified_stmt("SELECT * FROM users |> LIMIT 10 OFFSET 5");
dialects.verified_stmt("SELECT * FROM users |> LIMIT 10 |> LIMIT 5");
dialects.verified_stmt("SELECT * FROM users |> LIMIT 10 |> WHERE true");
// where pipe operator
dialects.verified_stmt("SELECT * FROM users |> WHERE id = 1");
dialects.verified_stmt("SELECT * FROM users |> WHERE id = 1 AND name = 'John'");
dialects.verified_stmt("SELECT * FROM users |> WHERE id = 1 OR name = 'John'");
// aggregate pipe operator full table
dialects.verified_stmt("SELECT * FROM users |> AGGREGATE COUNT(*)");
dialects.verified_query_with_canonical(
"SELECT * FROM users |> AGGREGATE COUNT(*) total_users",
"SELECT * FROM users |> AGGREGATE COUNT(*) AS total_users",
);
dialects.verified_stmt("SELECT * FROM users |> AGGREGATE COUNT(*) AS total_users");
dialects.verified_stmt("SELECT * FROM users |> AGGREGATE COUNT(*), MIN(id)");
// aggregate pipe operator with grouping
dialects.verified_stmt(
"SELECT * FROM users |> AGGREGATE SUM(o_totalprice) AS price, COUNT(*) AS cnt GROUP BY EXTRACT(YEAR FROM o_orderdate) AS year",
);
dialects.verified_stmt(
"SELECT * FROM users |> AGGREGATE GROUP BY EXTRACT(YEAR FROM o_orderdate) AS year",
);
dialects
.verified_stmt("SELECT * FROM users |> AGGREGATE GROUP BY EXTRACT(YEAR FROM o_orderdate)");
dialects.verified_stmt("SELECT * FROM users |> AGGREGATE GROUP BY a, b");
dialects.verified_stmt("SELECT * FROM users |> AGGREGATE SUM(c) GROUP BY a, b");
dialects.verified_stmt("SELECT * FROM users |> AGGREGATE SUM(c) ASC");
// order by pipe operator
dialects.verified_stmt("SELECT * FROM users |> ORDER BY id ASC");
dialects.verified_stmt("SELECT * FROM users |> ORDER BY id DESC");
dialects.verified_stmt("SELECT * FROM users |> ORDER BY id DESC, name ASC");
// many pipes
dialects.verified_stmt(
"SELECT * FROM CustomerOrders |> AGGREGATE SUM(cost) AS total_cost GROUP BY customer_id, state, item_type |> EXTEND COUNT(*) OVER (PARTITION BY customer_id) AS num_orders |> WHERE num_orders > 1 |> AGGREGATE AVG(total_cost) AS average GROUP BY state DESC, item_type ASC",
);
}
#[test]
fn parse_multiple_set_statements() -> Result<(), ParserError> {
let dialects = all_dialects_where(|d| d.supports_comma_separated_set_assignments());

@ -114,6 +114,7 @@ fn parse_create_procedure() {
order_by: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None,
@ -1252,6 +1253,7 @@ fn parse_substring_in_select() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}),
query
);
@ -1354,6 +1356,8 @@ fn parse_mssql_declare() {
order_by: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None,

@ -1113,6 +1113,7 @@ fn parse_escaped_quote_identifiers_with_escape() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}))
);
}
@ -1165,6 +1166,7 @@ fn parse_escaped_quote_identifiers_with_no_escape() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}))
);
}
@ -1211,6 +1213,7 @@ fn parse_escaped_backticks_with_escape() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}))
);
}
@ -1261,6 +1264,7 @@ fn parse_escaped_backticks_with_no_escape() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}))
);
}
@ -1436,6 +1440,7 @@ fn parse_simple_insert() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1484,6 +1489,7 @@ fn parse_ignore_insert() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1532,6 +1538,7 @@ fn parse_priority_insert() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1577,6 +1584,7 @@ fn parse_priority_insert() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1624,6 +1632,7 @@ fn parse_insert_as() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1686,6 +1695,7 @@ fn parse_insert_as() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1735,6 +1745,7 @@ fn parse_replace_insert() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1775,6 +1786,7 @@ fn parse_empty_row_insert() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1839,6 +1851,7 @@ fn parse_insert_with_on_duplicate_update() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -2745,6 +2758,7 @@ fn parse_substring_in_select() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}),
query
);
@ -3051,6 +3065,7 @@ fn parse_hex_string_introducer() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}))
)
}

@ -1326,6 +1326,7 @@ fn parse_copy_to() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
to: true,
target: CopyTarget::File {
@ -2994,6 +2995,7 @@ fn parse_array_subquery_expr() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
filter: None,
null_treatment: None,
@ -4785,6 +4787,7 @@ fn test_simple_postgres_insert_with_alias() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
assignments: vec![],
partitioned: None,
@ -4856,6 +4859,7 @@ fn test_simple_postgres_insert_with_alias() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
assignments: vec![],
partitioned: None,
@ -4925,6 +4929,7 @@ fn test_simple_insert_with_quoted_alias() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
assignments: vec![],
partitioned: None,