Merging upstream version 17.9.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent 2bf6699c56
commit 9777880e00
87 changed files with 45907 additions and 42511 deletions
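As a quick sanity check of the merged release, the snippet below (a minimal sketch, assuming the sqlglot 17.9.1 tree from this commit is importable) round-trips a trivial statement through the parser and generator:

    import sqlglot

    # Print the packaged version and round-trip a trivial statement.
    # transpile() parses and regenerates SQL, returning a list of strings.
    print(sqlglot.__version__)            # 17.9.1 in this tree
    print(sqlglot.transpile("SELECT 1 AS x"))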
@@ -185,6 +185,8 @@ class Parser(metaclass=_Parser):
        TokenType.VARIANT,
        TokenType.OBJECT,
        TokenType.INET,
        TokenType.IPADDRESS,
        TokenType.IPPREFIX,
        TokenType.ENUM,
        *NESTED_TYPE_TOKENS,
    }
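The hunk above extends the parser's set of recognized type tokens (VARIANT, OBJECT, INET, IPADDRESS, IPPREFIX, ENUM among them). A hedged illustration, assuming the Snowflake dialect shipped in this version accepts its semi-structured types as cast targets:

    import sqlglot

    # VARIANT and OBJECT are Snowflake semi-structured types; casting to them
    # exercises the type-token set shown in the hunk above.
    print(sqlglot.parse_one("SELECT CAST(col AS VARIANT)", read="snowflake").sql(dialect="snowflake"))
    print(sqlglot.parse_one("SELECT CAST(col AS OBJECT)", read="snowflake").sql(dialect="snowflake"))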
@@ -603,6 +605,7 @@ class Parser(metaclass=_Parser):
        "FALLBACK": lambda self, **kwargs: self._parse_fallback(**kwargs),
        "FORMAT": lambda self: self._parse_property_assignment(exp.FileFormatProperty),
        "FREESPACE": lambda self: self._parse_freespace(),
        "HEAP": lambda self: self.expression(exp.HeapProperty),
        "IMMUTABLE": lambda self: self.expression(
            exp.StabilityProperty, this=exp.Literal.string("IMMUTABLE")
        ),
@@ -832,6 +835,7 @@ class Parser(metaclass=_Parser):
    UNNEST_COLUMN_ONLY: bool = False
    ALIAS_POST_TABLESAMPLE: bool = False
    STRICT_STRING_CONCAT = False
    NORMALIZE_FUNCTIONS = "upper"
    NULL_ORDERING: str = "nulls_are_small"
    SHOW_TRIE: t.Dict = {}
    SET_TRIE: t.Dict = {}
@@ -1187,7 +1191,7 @@ class Parser(metaclass=_Parser):

        exists = self._parse_exists(not_=True)
        this = None
        expression = None
        expression: t.Optional[exp.Expression] = None
        indexes = None
        no_schema_binding = None
        begin = None
@@ -1207,12 +1211,16 @@ class Parser(metaclass=_Parser):
                extend_props(self._parse_properties())

            self._match(TokenType.ALIAS)
            begin = self._match(TokenType.BEGIN)
            return_ = self._match_text_seq("RETURN")
            expression = self._parse_statement()

            if return_:
                expression = self.expression(exp.Return, this=expression)
            if self._match(TokenType.COMMAND):
                expression = self._parse_as_command(self._prev)
            else:
                begin = self._match(TokenType.BEGIN)
                return_ = self._match_text_seq("RETURN")
                expression = self._parse_statement()

                if return_:
                    expression = self.expression(exp.Return, this=expression)
        elif create_token.token_type == TokenType.INDEX:
            this = self._parse_index(index=self._parse_id_var())
        elif create_token.token_type in self.DB_CREATABLES:
@@ -1692,6 +1700,7 @@ class Parser(metaclass=_Parser):
        return self.expression(exp.Describe, this=this, kind=kind)

    def _parse_insert(self) -> exp.Insert:
        comments = ensure_list(self._prev_comments)
        overwrite = self._match(TokenType.OVERWRITE)
        ignore = self._match(TokenType.IGNORE)
        local = self._match_text_seq("LOCAL")
@@ -1709,6 +1718,7 @@ class Parser(metaclass=_Parser):
        alternative = self._match_texts(self.INSERT_ALTERNATIVES) and self._prev.text

        self._match(TokenType.INTO)
        comments += ensure_list(self._prev_comments)
        self._match(TokenType.TABLE)
        this = self._parse_table(schema=True)
@@ -1716,6 +1726,7 @@ class Parser(metaclass=_Parser):

        return self.expression(
            exp.Insert,
            comments=comments,
            this=this,
            exists=self._parse_exists(),
            partition=self._parse_partition(),
@@ -1840,6 +1851,7 @@ class Parser(metaclass=_Parser):
        # This handles MySQL's "Multiple-Table Syntax"
        # https://dev.mysql.com/doc/refman/8.0/en/delete.html
        tables = None
        comments = self._prev_comments
        if not self._match(TokenType.FROM, advance=False):
            tables = self._parse_csv(self._parse_table) or None
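The hunk above references MySQL's multiple-table DELETE syntax. Below is a sketch, assuming the MySQL dialect in this release accepts the form documented at the linked manual page:

    import sqlglot

    # MySQL multiple-table DELETE: the deleted tables are listed before FROM.
    # Assumes the MySQL dialect round-trips this form.
    sql = "DELETE t1, t2 FROM t1 JOIN t2 ON t1.id = t2.id WHERE t1.x > 0"
    expr = sqlglot.parse_one(sql, read="mysql")
    print(type(expr).__name__)          # Delete
    print(expr.sql(dialect="mysql"))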
@@ -1847,6 +1859,7 @@ class Parser(metaclass=_Parser):

        return self.expression(
            exp.Delete,
            comments=comments,
            tables=tables,
            this=self._match(TokenType.FROM) and self._parse_table(joins=True),
            using=self._match(TokenType.USING) and self._parse_table(joins=True),
@@ -1856,11 +1869,13 @@ class Parser(metaclass=_Parser):
        )

    def _parse_update(self) -> exp.Update:
        comments = self._prev_comments
        this = self._parse_table(alias_tokens=self.UPDATE_ALIAS_TOKENS)
        expressions = self._match(TokenType.SET) and self._parse_csv(self._parse_equality)
        returning = self._parse_returning()
        return self.expression(
            exp.Update,
            comments=comments,
            **{  # type: ignore
                "this": this,
                "expressions": expressions,
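The insert, delete and update hunks all thread comments= through to the built expressions. A sketch, assuming a leading block comment survives a parse/generate round trip as it does for other statements:

    import sqlglot

    # Assumption: the leading comment is attached to the Update expression and
    # re-emitted when the statement is generated back to SQL.
    expr = sqlglot.parse_one("/* audit: ticket-123 */ UPDATE t SET x = 1 WHERE id = 2")
    print(expr.comments)
    print(expr.sql())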
@@ -2235,7 +2250,12 @@ class Parser(metaclass=_Parser):
            return None

        if not this:
            this = self._parse_function() or self._parse_id_var(any_token=False)
            this = (
                self._parse_unnest()
                or self._parse_function()
                or self._parse_id_var(any_token=False)
            )

        while self._match(TokenType.DOT):
            this = exp.Dot(
                this=this,
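The hunk above makes the dotted-path parser try _parse_unnest before falling back to functions and identifiers. The snippet below is only a general UNNEST illustration (BigQuery syntax), not a test of the new dotted form:

    import sqlglot

    # UNNEST used as a relation in BigQuery.
    expr = sqlglot.parse_one("SELECT n FROM UNNEST([1, 2, 3]) AS n", read="bigquery")
    print(expr.sql(dialect="bigquery"))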
@@ -3341,7 +3361,10 @@ class Parser(metaclass=_Parser):
            args = self._parse_csv(lambda: self._parse_lambda(alias=alias))

            if function and not anonymous:
                this = self.validate_expression(function(args), args)
                func = self.validate_expression(function(args), args)
                if not self.NORMALIZE_FUNCTIONS:
                    func.meta["name"] = this
                this = func
            else:
                this = self.expression(exp.Anonymous, this=this, expressions=args)
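The hunk above records the originally written function name in func.meta["name"] when a dialect disables NORMALIZE_FUNCTIONS. With the default setting, recognized function names are still upper-cased on output:

    import sqlglot

    # With the default NORMALIZE_FUNCTIONS = "upper", known function names are
    # upper-cased when the expression is generated back to SQL.
    print(sqlglot.parse_one("select sum(x) from t").sql())   # SELECT SUM(x) FROM t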
@@ -3842,13 +3865,11 @@ class Parser(metaclass=_Parser):
        args = self._parse_csv(self._parse_conjunction)

        index = self._index
        if not self._match(TokenType.R_PAREN):
        if not self._match(TokenType.R_PAREN) and args:
            # postgres: STRING_AGG([DISTINCT] expression, separator [ORDER BY expression1 {ASC | DESC} [, ...]])
            return self.expression(
                exp.GroupConcat,
                this=seq_get(args, 0),
                separator=self._parse_order(this=seq_get(args, 1)),
            )
        # bigquery: STRING_AGG([DISTINCT] expression [, separator] [ORDER BY key [{ASC | DESC}] [, ... ]] [LIMIT n])
        args[-1] = self._parse_limit(this=self._parse_order(this=args[-1]))
        return self.expression(exp.GroupConcat, this=args[0], separator=seq_get(args, 1))

        # Checks if we can parse an order clause: WITHIN GROUP (ORDER BY <order_by_expression_list> [ASC | DESC]).
        # This is done "manually", instead of letting _parse_window parse it into an exp.WithinGroup node, so that
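The hunk above distinguishes the Postgres and BigQuery shapes of STRING_AGG. A sketch, assuming both forms parse into exp.GroupConcat as the comments in the hunk describe:

    import sqlglot

    # Postgres shape: separator plus an ORDER BY inside the aggregate.
    expr = sqlglot.parse_one("SELECT STRING_AGG(name, ', ' ORDER BY name) FROM t", read="postgres")
    print(expr.find(sqlglot.exp.GroupConcat) is not None)   # True
    print(expr.sql(dialect="bigquery"))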
@@ -4172,7 +4193,7 @@ class Parser(metaclass=_Parser):

            self._match_r_paren()

        return self.expression(
        window = self.expression(
            exp.Window,
            this=this,
            partition_by=partition,
@@ -4183,6 +4204,12 @@ class Parser(metaclass=_Parser):
            first=first,
        )

        # This covers Oracle's FIRST/LAST syntax: aggregate KEEP (...) OVER (...)
        if self._match_set(self.WINDOW_BEFORE_PAREN_TOKENS, advance=False):
            return self._parse_window(window, alias=alias)

        return window

    def _parse_window_spec(self) -> t.Dict[str, t.Optional[str | exp.Expression]]:
        self._match(TokenType.BETWEEN)
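The comment in the hunk above names Oracle's FIRST/LAST feature. Below is a sketch, assuming the Oracle dialect in this release parses the KEEP (...) OVER (...) combination the hunk adds:

    import sqlglot

    # Oracle FIRST/LAST: an aggregate with KEEP (...) followed by OVER (...).
    # Assumes this release's Oracle dialect accepts the combined form.
    sql = "SELECT MAX(price) KEEP (DENSE_RANK LAST ORDER BY ts) OVER (PARTITION BY sym) FROM trades"
    print(sqlglot.parse_one(sql, read="oracle").sql(dialect="oracle"))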
@@ -4276,19 +4303,19 @@ class Parser(metaclass=_Parser):
    def _parse_null(self) -> t.Optional[exp.Expression]:
        if self._match(TokenType.NULL):
            return self.PRIMARY_PARSERS[TokenType.NULL](self, self._prev)
        return None
        return self._parse_placeholder()

    def _parse_boolean(self) -> t.Optional[exp.Expression]:
        if self._match(TokenType.TRUE):
            return self.PRIMARY_PARSERS[TokenType.TRUE](self, self._prev)
        if self._match(TokenType.FALSE):
            return self.PRIMARY_PARSERS[TokenType.FALSE](self, self._prev)
        return None
        return self._parse_placeholder()

    def _parse_star(self) -> t.Optional[exp.Expression]:
        if self._match(TokenType.STAR):
            return self.PRIMARY_PARSERS[TokenType.STAR](self, self._prev)
        return None
        return self._parse_placeholder()

    def _parse_parameter(self) -> exp.Parameter:
        wrapped = self._match(TokenType.L_BRACE)
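The hunk above makes _parse_null, _parse_boolean and _parse_star fall back to placeholder parsing instead of returning None. The snippet below only shows sqlglot's placeholder expressions in general, not that specific fallback path:

    import sqlglot

    # Named (:a) and positional (?) placeholders parse into exp.Placeholder nodes.
    expr = sqlglot.parse_one("SELECT * FROM t WHERE a = :a AND b = ?")
    print([p.sql() for p in expr.find_all(sqlglot.exp.Placeholder)])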