1
0
Fork 0

Merging upstream version 9.0.6.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 14:51:47 +01:00
parent e369f04a93
commit 69b4fb4368
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
31 changed files with 694 additions and 196 deletions

View file

@@ -131,6 +131,7 @@ class Parser:
TokenType.ALTER,
TokenType.ALWAYS,
TokenType.ANTI,
TokenType.APPLY,
TokenType.BEGIN,
TokenType.BOTH,
TokenType.BUCKET,
@@ -190,6 +191,7 @@ class Parser:
TokenType.TABLE,
TokenType.TABLE_FORMAT,
TokenType.TEMPORARY,
TokenType.TRANSIENT,
TokenType.TOP,
TokenType.TRAILING,
TokenType.TRUNCATE,
@@ -204,7 +206,7 @@ class Parser:
*TYPE_TOKENS,
}
TABLE_ALIAS_TOKENS = ID_VAR_TOKENS - {TokenType.NATURAL}
TABLE_ALIAS_TOKENS = ID_VAR_TOKENS - {TokenType.NATURAL, TokenType.APPLY}
TRIM_TYPES = {TokenType.LEADING, TokenType.TRAILING, TokenType.BOTH}
@@ -685,6 +687,7 @@ class Parser:
def _parse_create(self):
replace = self._match(TokenType.OR) and self._match(TokenType.REPLACE)
temporary = self._match(TokenType.TEMPORARY)
transient = self._match(TokenType.TRANSIENT)
unique = self._match(TokenType.UNIQUE)
materialized = self._match(TokenType.MATERIALIZED)
@@ -723,6 +726,7 @@ class Parser:
exists=exists,
properties=properties,
temporary=temporary,
transient=transient,
replace=replace,
unique=unique,
materialized=materialized,
@@ -1057,8 +1061,8 @@ class Parser:
return self._parse_set_operations(this) if this else None
def _parse_with(self):
if not self._match(TokenType.WITH):
def _parse_with(self, skip_with_token=False):
if not skip_with_token and not self._match(TokenType.WITH):
return None
recursive = self._match(TokenType.RECURSIVE)
@@ -1167,28 +1171,53 @@ class Parser:
return self.expression(exp.From, expressions=self._parse_csv(self._parse_table))
def _parse_lateral(self):
if not self._match(TokenType.LATERAL):
outer_apply = self._match_pair(TokenType.OUTER, TokenType.APPLY)
cross_apply = self._match_pair(TokenType.CROSS, TokenType.APPLY)
if outer_apply or cross_apply:
this = self._parse_select(table=True)
view = None
outer = not cross_apply
elif self._match(TokenType.LATERAL):
this = self._parse_select(table=True)
view = self._match(TokenType.VIEW)
outer = self._match(TokenType.OUTER)
else:
return None
subquery = self._parse_select(table=True)
if not this:
this = self._parse_function()
if subquery:
return self.expression(exp.Lateral, this=subquery)
table_alias = self._parse_id_var(any_token=False)
self._match(TokenType.VIEW)
outer = self._match(TokenType.OUTER)
columns = None
if self._match(TokenType.ALIAS):
columns = self._parse_csv(self._parse_id_var)
elif self._match(TokenType.L_PAREN):
columns = self._parse_csv(self._parse_id_var)
self._match(TokenType.R_PAREN)
return self.expression(
expression = self.expression(
exp.Lateral,
this=self._parse_function(),
this=this,
view=view,
outer=outer,
alias=self.expression(
exp.TableAlias,
this=self._parse_id_var(any_token=False),
columns=(self._parse_csv(self._parse_id_var) if self._match(TokenType.ALIAS) else None),
this=table_alias,
columns=columns,
),
)
if outer_apply or cross_apply:
return self.expression(
exp.Join,
this=expression,
side=None if cross_apply else "LEFT",
)
return expression
def _parse_join_side_and_kind(self):
return (
self._match(TokenType.NATURAL) and self._prev,
@@ -1196,10 +1225,10 @@ class Parser:
self._match_set(self.JOIN_KINDS) and self._prev,
)
def _parse_join(self):
def _parse_join(self, skip_join_token=False):
natural, side, kind = self._parse_join_side_and_kind()
if not self._match(TokenType.JOIN):
if not skip_join_token and not self._match(TokenType.JOIN):
return None
kwargs = {"this": self._parse_table()}
@@ -1425,13 +1454,13 @@ class Parser:
unpivot=unpivot,
)
def _parse_where(self):
if not self._match(TokenType.WHERE):
def _parse_where(self, skip_where_token=False):
if not skip_where_token and not self._match(TokenType.WHERE):
return None
return self.expression(exp.Where, this=self._parse_conjunction())
def _parse_group(self):
if not self._match(TokenType.GROUP_BY):
def _parse_group(self, skip_group_by_token=False):
if not skip_group_by_token and not self._match(TokenType.GROUP_BY):
return None
return self.expression(
exp.Group,
@@ -1457,8 +1486,8 @@ class Parser:
return self.expression(exp.Tuple, expressions=grouping_set)
return self._parse_id_var()
def _parse_having(self):
if not self._match(TokenType.HAVING):
def _parse_having(self, skip_having_token=False):
if not skip_having_token and not self._match(TokenType.HAVING):
return None
return self.expression(exp.Having, this=self._parse_conjunction())
@@ -1467,8 +1496,8 @@ class Parser:
return None
return self.expression(exp.Qualify, this=self._parse_conjunction())
def _parse_order(self, this=None):
if not self._match(TokenType.ORDER_BY):
def _parse_order(self, this=None, skip_order_token=False):
if not skip_order_token and not self._match(TokenType.ORDER_BY):
return this
return self.expression(exp.Order, this=this, expressions=self._parse_csv(self._parse_ordered))
@@ -1502,7 +1531,11 @@ class Parser:
def _parse_limit(self, this=None, top=False):
if self._match(TokenType.TOP if top else TokenType.LIMIT):
return self.expression(exp.Limit, this=this, expression=self._parse_number())
limit_paren = self._match(TokenType.L_PAREN)
limit_exp = self.expression(exp.Limit, this=this, expression=self._parse_number())
if limit_paren:
self._match(TokenType.R_PAREN)
return limit_exp
if self._match(TokenType.FETCH):
direction = self._match_set((TokenType.FIRST, TokenType.NEXT))
direction = self._prev.text if direction else "FIRST"
@@ -2136,7 +2169,7 @@ class Parser:
return self.expression(exp.Cast if strict else exp.TryCast, this=this, to=to)
def _parse_convert(self, strict):
this = self._parse_field()
this = self._parse_column()
if self._match(TokenType.USING):
to = self.expression(exp.CharacterSet, this=self._parse_var())
elif self._match(TokenType.COMMA):