Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 12 additions & 1 deletion src/ast/query.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2860,6 +2860,8 @@ impl fmt::Display for OrderBy {
pub struct OrderByExpr {
/// The expression to order by.
pub expr: Expr,
/// Optional PostgreSQL `USING <operator>` clause.
pub using_operator: Option<ObjectName>,
/// Ordering options such as `ASC`/`DESC` and `NULLS` behavior.
pub options: OrderByOptions,
/// Optional `WITH FILL` clause (ClickHouse extension) which specifies how to fill gaps.
Expand All @@ -2870,6 +2872,7 @@ impl From<Ident> for OrderByExpr {
fn from(ident: Ident) -> Self {
OrderByExpr {
expr: Expr::Identifier(ident),
using_operator: None,
options: OrderByOptions::default(),
with_fill: None,
}
Expand All @@ -2878,7 +2881,15 @@ impl From<Ident> for OrderByExpr {

impl fmt::Display for OrderByExpr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}{}", self.expr, self.options)?;
write!(f, "{}", self.expr)?;
if let Some(using_operator) = &self.using_operator {
if using_operator.0.len() > 1 {
write!(f, " USING OPERATOR({using_operator})")?;
} else {
write!(f, " USING {using_operator}")?;
}
}
write!(f, "{}", self.options)?;
if let Some(ref with_fill) = self.with_fill {
write!(f, " {with_fill}")?
}
Expand Down
1 change: 1 addition & 0 deletions src/ast/spans.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2087,6 +2087,7 @@ impl Spanned for OrderByExpr {
fn span(&self) -> Span {
let OrderByExpr {
expr,
using_operator: _,
options: _,
with_fill,
} = self;
Expand Down
258 changes: 163 additions & 95 deletions src/parser/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -508,10 +508,10 @@ impl<'a> Parser<'a> {
Token::EOF => break,

// end of statement
Token::Word(word) => {
if expecting_statement_delimiter && word.keyword == Keyword::END {
break;
}
Token::Word(word)
if expecting_statement_delimiter && word.keyword == Keyword::END =>
{
break;
}
_ => {}
}
Expand Down Expand Up @@ -1298,41 +1298,40 @@ impl<'a> Parser<'a> {

let next_token = self.next_token();
match next_token.token {
t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
if self.peek_token_ref().token == Token::Period {
let mut id_parts: Vec<Ident> = vec![match t {
Token::Word(w) => w.into_ident(next_token.span),
Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
_ => {
return Err(ParserError::ParserError(
"Internal parser error: unexpected token type".to_string(),
))
t @ (Token::Word(_) | Token::SingleQuotedString(_))
if self.peek_token_ref().token == Token::Period =>
{
let mut id_parts: Vec<Ident> = vec![match t {
Token::Word(w) => w.into_ident(next_token.span),
Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
_ => {
return Err(ParserError::ParserError(
"Internal parser error: unexpected token type".to_string(),
))
}
}];

while self.consume_token(&Token::Period) {
let next_token = self.next_token();
match next_token.token {
Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
Token::SingleQuotedString(s) => {
// SQLite has single-quoted identifiers
id_parts.push(Ident::with_quote('\'', s))
}
}];

while self.consume_token(&Token::Period) {
let next_token = self.next_token();
match next_token.token {
Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
Token::SingleQuotedString(s) => {
// SQLite has single-quoted identifiers
id_parts.push(Ident::with_quote('\'', s))
}
Token::Placeholder(s) => {
// Snowflake uses $1, $2, etc. for positional column references
// in staged data queries like: SELECT t.$1 FROM @stage t
id_parts.push(Ident::new(s))
}
Token::Mul => {
return Ok(Expr::QualifiedWildcard(
ObjectName::from(id_parts),
AttachedToken(next_token),
));
}
_ => {
return self
.expected("an identifier or a '*' after '.'", next_token);
}
Token::Placeholder(s) => {
// Snowflake uses $1, $2, etc. for positional column references
// in staged data queries like: SELECT t.$1 FROM @stage t
id_parts.push(Ident::new(s))
}
Token::Mul => {
return Ok(Expr::QualifiedWildcard(
ObjectName::from(id_parts),
AttachedToken(next_token),
));
}
_ => {
return self.expected("an identifier or a '*' after '.'", next_token);
}
}
}
Expand Down Expand Up @@ -4990,10 +4989,10 @@ impl<'a> Parser<'a> {
loop {
match &self.peek_nth_token_ref(0).token {
Token::EOF => break,
Token::Word(w) => {
if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
break;
}
Token::Word(w)
if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) =>
{
break;
}
_ => {}
}
Expand Down Expand Up @@ -8173,70 +8172,60 @@ impl<'a> Parser<'a> {
Keyword::LINES,
Keyword::NULL,
]) {
Some(Keyword::FIELDS) => {
if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
Some(Keyword::FIELDS)
if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
{
row_delimiters.push(HiveRowDelimiter {
delimiter: HiveDelimiter::FieldsTerminatedBy,
char: self.parse_identifier()?,
});

if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
row_delimiters.push(HiveRowDelimiter {
delimiter: HiveDelimiter::FieldsTerminatedBy,
delimiter: HiveDelimiter::FieldsEscapedBy,
char: self.parse_identifier()?,
});

if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
row_delimiters.push(HiveRowDelimiter {
delimiter: HiveDelimiter::FieldsEscapedBy,
char: self.parse_identifier()?,
});
}
} else {
break;
}
}
Some(Keyword::COLLECTION) => {
Some(Keyword::COLLECTION)
if self.parse_keywords(&[
Keyword::ITEMS,
Keyword::TERMINATED,
Keyword::BY,
]) {
row_delimiters.push(HiveRowDelimiter {
delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
char: self.parse_identifier()?,
});
} else {
break;
}
]) =>
{
row_delimiters.push(HiveRowDelimiter {
delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
char: self.parse_identifier()?,
});
}
Some(Keyword::MAP) => {
Some(Keyword::MAP)
if self.parse_keywords(&[
Keyword::KEYS,
Keyword::TERMINATED,
Keyword::BY,
]) {
row_delimiters.push(HiveRowDelimiter {
delimiter: HiveDelimiter::MapKeysTerminatedBy,
char: self.parse_identifier()?,
});
} else {
break;
}
]) =>
{
row_delimiters.push(HiveRowDelimiter {
delimiter: HiveDelimiter::MapKeysTerminatedBy,
char: self.parse_identifier()?,
});
}
Some(Keyword::LINES) => {
if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
row_delimiters.push(HiveRowDelimiter {
delimiter: HiveDelimiter::LinesTerminatedBy,
char: self.parse_identifier()?,
});
} else {
break;
}
Some(Keyword::LINES)
if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
{
row_delimiters.push(HiveRowDelimiter {
delimiter: HiveDelimiter::LinesTerminatedBy,
char: self.parse_identifier()?,
});
}
Some(Keyword::NULL) => {
if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
row_delimiters.push(HiveRowDelimiter {
delimiter: HiveDelimiter::NullDefinedAs,
char: self.parse_identifier()?,
});
} else {
break;
}
Some(Keyword::NULL)
if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) =>
{
row_delimiters.push(HiveRowDelimiter {
delimiter: HiveDelimiter::NullDefinedAs,
char: self.parse_identifier()?,
});
}
_ => {
break;
Expand Down Expand Up @@ -18083,7 +18072,32 @@ impl<'a> Parser<'a> {
None
};

let options = self.parse_order_by_options()?;
let using_operator = if !with_operator_class
&& dialect_of!(self is PostgreSqlDialect)
&& self.parse_keyword(Keyword::USING)
{
Some(self.parse_order_by_using_operator()?)
} else {
None
};

let options = if using_operator.is_some() {
if self
.peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC])
.is_some()
{
return parser_err!(
"ASC/DESC cannot be used together with USING in ORDER BY".to_string(),
self.peek_token_ref().span.start
);
}
OrderByOptions {
asc: None,
nulls_first: self.parse_order_by_nulls_first_last(),
}
} else {
self.parse_order_by_options()?
};

let with_fill = if self.dialect.supports_with_fill()
&& self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
Expand All @@ -18096,23 +18110,76 @@ impl<'a> Parser<'a> {
Ok((
OrderByExpr {
expr,
using_operator,
options,
with_fill,
},
operator_class,
))
}

fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
let asc = self.parse_asc_desc();
fn parse_order_by_using_operator(&mut self) -> Result<ObjectName, ParserError> {
if self.parse_keyword(Keyword::OPERATOR) {
self.expect_token(&Token::LParen)?;
let operator_name = self.parse_operator_name()?;
let Some(last_part) = operator_name.0.last() else {
return self.expected_ref("an operator name", self.peek_token_ref());
};
let operator = last_part.to_string();
if !Self::is_valid_order_by_using_operator_symbol(&operator) {
return self.expected_ref("an operator name", self.peek_token_ref());
}
self.expect_token(&Token::RParen)?;
return Ok(operator_name);
}

let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
let token = self.next_token();
let operator = token.token.to_string();
if Self::is_valid_order_by_using_operator_symbol(&operator) {
Ok(ObjectName::from(vec![Ident::new(operator)]))
} else {
self.expected_ref("an ordering operator after USING", &token)
}
}

/// Returns `true` when `symbol` is non-empty and consists solely of
/// characters PostgreSQL permits in operator names
/// (`+ - * / < > = ~ ! @ # % ^ & | ` ?`).
fn is_valid_order_by_using_operator_symbol(symbol: &str) -> bool {
    // The complete set of characters allowed in a PostgreSQL operator name.
    const OPERATOR_CHARS: &str = "+-*/<>=~!@#%^&|`?";
    let mut chars = symbol.chars();
    // Require at least one character (an empty iterator means an empty
    // symbol), then verify every remaining character is in the allowed set.
    match chars.next() {
        None => false,
        Some(first) => {
            OPERATOR_CHARS.contains(first) && chars.all(|c| OPERATOR_CHARS.contains(c))
        }
    }
}

fn parse_order_by_nulls_first_last(&mut self) -> Option<bool> {
if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
Some(true)
} else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
Some(false)
} else {
None
};
}
}

/// Parses the ordering options of an `ORDER BY` expression: an optional
/// `ASC`/`DESC` direction followed by an optional `NULLS FIRST`/`NULLS LAST`
/// clause, collected into an [`OrderByOptions`].
fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
// Optional ASC/DESC direction; `None` when unspecified.
let asc = self.parse_asc_desc();
// `Some(true)` for NULLS FIRST, `Some(false)` for NULLS LAST, `None` when absent.
let nulls_first = self.parse_order_by_nulls_first_last();

Ok(OrderByOptions { asc, nulls_first })
}
Expand Down Expand Up @@ -20309,6 +20376,7 @@ mod tests {
asc: None,
nulls_first: None,
},
using_operator: None,
with_fill: None,
},
operator_class: None,
Expand Down
2 changes: 2 additions & 0 deletions tests/sqlparser_bigquery.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2721,6 +2721,7 @@ fn test_export_data() {
asc: None,
nulls_first: None,
},
using_operator: None,
with_fill: None,
},]),
interpolate: None,
Expand Down Expand Up @@ -2827,6 +2828,7 @@ fn test_export_data() {
asc: None,
nulls_first: None,
},
using_operator: None,
with_fill: None,
},]),
interpolate: None,
Expand Down
Loading