
Merging upstream version 11.4.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Author: Daniel Baumann
Date: 2025-02-13 15:46:19 +01:00
Parent: ecb42ec17f
Commit: 63746a3e92
Signed by: daniel (GPG key ID: FBB4F0E80A80222F)
89 changed files with 35352 additions and 33081 deletions

sqlglot/parser.py

@@ -96,6 +96,7 @@ class Parser(metaclass=_Parser):
    NO_PAREN_FUNCTIONS = {
        TokenType.CURRENT_DATE: exp.CurrentDate,
        TokenType.CURRENT_DATETIME: exp.CurrentDate,
        TokenType.CURRENT_TIME: exp.CurrentTime,
        TokenType.CURRENT_TIMESTAMP: exp.CurrentTimestamp,
    }
@@ -198,7 +199,6 @@ class Parser(metaclass=_Parser):
        TokenType.COMMIT,
        TokenType.COMPOUND,
        TokenType.CONSTRAINT,
        TokenType.CURRENT_TIME,
        TokenType.DEFAULT,
        TokenType.DELETE,
        TokenType.DESCRIBE,
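Note (not part of the diff): these two hunks move TokenType.CURRENT_TIME out of the plain identifier tokens and into NO_PAREN_FUNCTIONS, so a bare CURRENT_TIME parses as a function expression. A minimal sketch of the effect, assuming sqlglot's public parse_one API:

import sqlglot
from sqlglot import exp

# CURRENT_TIME without parentheses should now come back as an
# exp.CurrentTime node rather than a column named CURRENT_TIME.
node = sqlglot.parse_one("SELECT CURRENT_TIME")
print(node.find(exp.CurrentTime))  # expected: CURRENT_TIME
print(node.sql())                  # expected: SELECT CURRENT_TIME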
@@ -370,8 +370,9 @@ class Parser(metaclass=_Parser):
    LAMBDAS = {
        TokenType.ARROW: lambda self, expressions: self.expression(
            exp.Lambda,
            this=self._parse_conjunction().transform(
                self._replace_lambda, {node.name for node in expressions}
            this=self._replace_lambda(
                self._parse_conjunction(),
                {node.name for node in expressions},
            ),
            expressions=expressions,
        ),
@@ -441,6 +442,7 @@ class Parser(metaclass=_Parser):
        exp.With: lambda self: self._parse_with(),
        exp.Window: lambda self: self._parse_named_window(),
        exp.Qualify: lambda self: self._parse_qualify(),
        exp.Returning: lambda self: self._parse_returning(),
        "JOIN_TYPE": lambda self: self._parse_join_side_and_kind(),
    }
@@ -460,6 +462,7 @@ class Parser(metaclass=_Parser):
        TokenType.LOAD_DATA: lambda self: self._parse_load_data(),
        TokenType.MERGE: lambda self: self._parse_merge(),
        TokenType.ROLLBACK: lambda self: self._parse_commit_or_rollback(),
        TokenType.SET: lambda self: self._parse_set(),
        TokenType.UNCACHE: lambda self: self._parse_uncache(),
        TokenType.UPDATE: lambda self: self._parse_update(),
        TokenType.USE: lambda self: self.expression(
@@ -656,15 +659,15 @@ class Parser(metaclass=_Parser):
    }

    FUNCTION_PARSERS: t.Dict[str, t.Callable] = {
        "CAST": lambda self: self._parse_cast(self.STRICT_CAST),
        "CONVERT": lambda self: self._parse_convert(self.STRICT_CAST),
        "TRY_CONVERT": lambda self: self._parse_convert(False),
        "EXTRACT": lambda self: self._parse_extract(),
        "POSITION": lambda self: self._parse_position(),
        "STRING_AGG": lambda self: self._parse_string_agg(),
        "SUBSTRING": lambda self: self._parse_substring(),
        "TRIM": lambda self: self._parse_trim(),
        "CAST": lambda self: self._parse_cast(self.STRICT_CAST),
        "TRY_CAST": lambda self: self._parse_cast(False),
        "STRING_AGG": lambda self: self._parse_string_agg(),
        "TRY_CONVERT": lambda self: self._parse_convert(False),
    }

    QUERY_MODIFIER_PARSERS = {
@@ -684,13 +687,28 @@ class Parser(metaclass=_Parser):
        "sample": lambda self: self._parse_table_sample(as_modifier=True),
    }

    SET_PARSERS = {
        "GLOBAL": lambda self: self._parse_set_item_assignment("GLOBAL"),
        "LOCAL": lambda self: self._parse_set_item_assignment("LOCAL"),
        "SESSION": lambda self: self._parse_set_item_assignment("SESSION"),
        "TRANSACTION": lambda self: self._parse_set_transaction(),
    }

    SHOW_PARSERS: t.Dict[str, t.Callable] = {}
    SET_PARSERS: t.Dict[str, t.Callable] = {}

    MODIFIABLES = (exp.Subquery, exp.Subqueryable, exp.Table)

    TRANSACTION_KIND = {"DEFERRED", "IMMEDIATE", "EXCLUSIVE"}

    TRANSACTION_CHARACTERISTICS = {
        "ISOLATION LEVEL REPEATABLE READ",
        "ISOLATION LEVEL READ COMMITTED",
        "ISOLATION LEVEL READ UNCOMMITTED",
        "ISOLATION LEVEL SERIALIZABLE",
        "READ WRITE",
        "READ ONLY",
    }

    INSERT_ALTERNATIVES = {"ABORT", "FAIL", "IGNORE", "REPLACE", "ROLLBACK"}

    WINDOW_ALIAS_TOKENS = ID_VAR_TOKENS - {TokenType.ROWS}
@@ -1775,11 +1793,12 @@ class Parser(metaclass=_Parser):
        self, alias_tokens: t.Optional[t.Collection[TokenType]] = None
    ) -> t.Optional[exp.Expression]:
        any_token = self._match(TokenType.ALIAS)
        alias = self._parse_id_var(
            any_token=any_token, tokens=alias_tokens or self.TABLE_ALIAS_TOKENS
        alias = (
            self._parse_id_var(any_token=any_token, tokens=alias_tokens or self.TABLE_ALIAS_TOKENS)
            or self._parse_string_as_identifier()
        )
        index = self._index
        index = self._index
        if self._match(TokenType.L_PAREN):
            columns = self._parse_csv(self._parse_function_parameter)
            self._match_r_paren() if columns else self._retreat(index)
@@ -2046,7 +2065,12 @@ class Parser(metaclass=_Parser):
    def _parse_table_parts(self, schema: bool = False) -> exp.Expression:
        catalog = None
        db = None
        table = (not schema and self._parse_function()) or self._parse_id_var(any_token=False)
        table = (
            (not schema and self._parse_function())
            or self._parse_id_var(any_token=False)
            or self._parse_string_as_identifier()
        )
        while self._match(TokenType.DOT):
            if catalog:
@@ -2085,6 +2109,8 @@ class Parser(metaclass=_Parser):
        subquery = self._parse_select(table=True)
        if subquery:
            if not subquery.args.get("pivots"):
                subquery.set("pivots", self._parse_pivots())
            return subquery

        this = self._parse_table_parts(schema=schema)
@@ -3370,9 +3396,9 @@ class Parser(metaclass=_Parser):
    def _parse_window(
        self, this: t.Optional[exp.Expression], alias: bool = False
    ) -> t.Optional[exp.Expression]:
        if self._match(TokenType.FILTER):
            where = self._parse_wrapped(self._parse_where)
            this = self.expression(exp.Filter, this=this, expression=where)
        if self._match_pair(TokenType.FILTER, TokenType.L_PAREN):
            this = self.expression(exp.Filter, this=this, expression=self._parse_where())
            self._match_r_paren()

        # T-SQL allows the OVER (...) syntax after WITHIN GROUP.
        # https://learn.microsoft.com/en-us/sql/t-sql/functions/percentile-disc-transact-sql?view=sql-server-ver16
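Note (not part of the diff): _match_pair means FILTER is only treated as an aggregate filter clause when an opening parenthesis follows immediately, and the matching right parenthesis is consumed explicitly. A rough sketch of the syntax this covers, assuming the public parse_one API:

import sqlglot
from sqlglot import exp

# FILTER (WHERE ...) after an aggregate wraps it in an exp.Filter node.
node = sqlglot.parse_one("SELECT SUM(x) FILTER(WHERE x > 0) FROM t")
print(node.find(exp.Filter) is not None)  # expected: True
print(node.sql())  # expected to round-trip, e.g. SELECT SUM(x) FILTER(WHERE x > 0) FROM t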
@@ -3504,6 +3530,9 @@ class Parser(metaclass=_Parser):
            return self.PRIMARY_PARSERS[TokenType.STRING](self, self._prev)
        return self._parse_placeholder()

    def _parse_string_as_identifier(self) -> t.Optional[exp.Expression]:
        return exp.to_identifier(self._match(TokenType.STRING) and self._prev.text, quoted=True)

    def _parse_number(self) -> t.Optional[exp.Expression]:
        if self._match(TokenType.NUMBER):
            return self.PRIMARY_PARSERS[TokenType.NUMBER](self, self._prev)
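Note (not part of the diff): _parse_string_as_identifier turns a string token into a quoted identifier, and the earlier hunks use it as a fallback for table names and table aliases. A small sketch of what that enables, assuming the public parse_one API; the file-like table name is just an illustration:

import sqlglot
from sqlglot import exp

# A single-quoted string in a table position falls back to a quoted
# identifier instead of failing to parse.
node = sqlglot.parse_one("SELECT * FROM 'my_table.csv'")
table = node.find(exp.Table)
print(table.sql())  # expected: "my_table.csv" (a quoted identifier, not a string literal)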
@@ -3778,23 +3807,6 @@ class Parser(metaclass=_Parser):
            )

        return self._parse_as_command(start)

    def _parse_show(self) -> t.Optional[exp.Expression]:
        parser = self._find_parser(self.SHOW_PARSERS, self._show_trie)  # type: ignore
        if parser:
            return parser(self)
        self._advance()
        return self.expression(exp.Show, this=self._prev.text.upper())

    def _default_parse_set_item(self) -> exp.Expression:
        return self.expression(
            exp.SetItem,
            this=self._parse_statement(),
        )

    def _parse_set_item(self) -> t.Optional[exp.Expression]:
        parser = self._find_parser(self.SET_PARSERS, self._set_trie)  # type: ignore
        return parser(self) if parser else self._default_parse_set_item()

    def _parse_merge(self) -> exp.Expression:
        self._match(TokenType.INTO)
        target = self._parse_table()
@@ -3861,8 +3873,71 @@ class Parser(metaclass=_Parser):
            expressions=whens,
        )

    def _parse_show(self) -> t.Optional[exp.Expression]:
        parser = self._find_parser(self.SHOW_PARSERS, self._show_trie)  # type: ignore
        if parser:
            return parser(self)
        self._advance()
        return self.expression(exp.Show, this=self._prev.text.upper())

    def _parse_set_item_assignment(
        self, kind: t.Optional[str] = None
    ) -> t.Optional[exp.Expression]:
        index = self._index
        if kind in {"GLOBAL", "SESSION"} and self._match_text_seq("TRANSACTION"):
            return self._parse_set_transaction(global_=kind == "GLOBAL")
        left = self._parse_primary() or self._parse_id_var()
        if not self._match_texts(("=", "TO")):
            self._retreat(index)
            return None
        right = self._parse_statement() or self._parse_id_var()
        this = self.expression(
            exp.EQ,
            this=left,
            expression=right,
        )
        return self.expression(
            exp.SetItem,
            this=this,
            kind=kind,
        )

    def _parse_set_transaction(self, global_: bool = False) -> exp.Expression:
        self._match_text_seq("TRANSACTION")
        characteristics = self._parse_csv(
            lambda: self._parse_var_from_options(self.TRANSACTION_CHARACTERISTICS)
        )
        return self.expression(
            exp.SetItem,
            expressions=characteristics,
            kind="TRANSACTION",
            **{"global": global_},  # type: ignore
        )

    def _parse_set_item(self) -> t.Optional[exp.Expression]:
        parser = self._find_parser(self.SET_PARSERS, self._set_trie)  # type: ignore
        return parser(self) if parser else self._parse_set_item_assignment(kind=None)

    def _parse_set(self) -> exp.Expression:
        return self.expression(exp.Set, expressions=self._parse_csv(self._parse_set_item))
        index = self._index
        set_ = self.expression(exp.Set, expressions=self._parse_csv(self._parse_set_item))
        if self._curr:
            self._retreat(index)
            return self._parse_as_command(self._prev)
        return set_

    def _parse_var_from_options(self, options: t.Collection[str]) -> t.Optional[exp.Expression]:
        for option in options:
            if self._match_text_seq(*option.split(" ")):
                return exp.Var(this=option)
        return None

    def _parse_as_command(self, start: Token) -> exp.Command:
        while self._curr:
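Note (not part of the diff): together with the TokenType.SET entry in STATEMENT_PARSERS and the new SET_PARSERS table, these methods turn SET statements into exp.Set/exp.SetItem nodes, falling back to a generic exp.Command if anything is left unparsed. A rough sketch of the intended behaviour, assuming the public parse_one API; the round-tripped SQL shown is an expectation, not verified output:

import sqlglot

# A plain assignment goes through _parse_set_item_assignment(kind=None).
node = sqlglot.parse_one("SET x = 1")
print(type(node).__name__, node.sql())  # expected: Set SET x = 1

# GLOBAL/SESSION/LOCAL dispatch via SET_PARSERS, TRANSACTION via
# _parse_set_transaction and the multi-word TRANSACTION_CHARACTERISTICS.
print(sqlglot.parse_one("SET SESSION x = 1").sql())          # expected: SET SESSION x = 1
print(sqlglot.parse_one("SET TRANSACTION READ ONLY").sql())  # expected: SET TRANSACTION READ ONLY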
@@ -3874,6 +3949,9 @@ class Parser(metaclass=_Parser):
    def _find_parser(
        self, parsers: t.Dict[str, t.Callable], trie: t.Dict
    ) -> t.Optional[t.Callable]:
        if not self._curr:
            return None

        index = self._index
        this = []
        while True:
@@ -3973,7 +4051,16 @@ class Parser(metaclass=_Parser):
        return this

    def _replace_lambda(self, node, lambda_variables):
        if isinstance(node, exp.Column):
            if node.name in lambda_variables:
                return node.this
        for column in node.find_all(exp.Column):
            if column.parts[0].name in lambda_variables:
                dot_or_id = column.to_dot() if column.table else column.this
                parent = column.parent
                while isinstance(parent, exp.Dot):
                    if not isinstance(parent.parent, exp.Dot):
                        parent.replace(dot_or_id)
                        break
                    parent = parent.parent
                else:
                    column.replace(dot_or_id)
        return node
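Note (not part of the diff): _replace_lambda is now called directly on the parsed lambda body (see the LAMBDAS hunk above) and walks every column, so dotted references to a lambda variable such as x.a are rewritten via Column.to_dot() instead of only bare x. A small sketch, assuming the public parse_one API; ARRAY_FILTER is just an arbitrary function name used for illustration:

import sqlglot
from sqlglot import exp

# The lambda body keeps x.a as a dotted reference to the lambda variable x
# rather than treating x as a table qualifier to be resolved later.
node = sqlglot.parse_one("SELECT ARRAY_FILTER(xs, x -> x.a > 0)")
lamb = node.find(exp.Lambda)
print(lamb.sql())  # expected: x -> x.a > 0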