1
0
Fork 0

Merging upstream version 11.0.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 15:23:26 +01:00
parent fdac67ef7f
commit ba0f3f0bfa
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
112 changed files with 126100 additions and 230 deletions

View file

@@ -194,6 +194,7 @@ class Parser(metaclass=_Parser):
TokenType.INTERVAL,
TokenType.LAZY,
TokenType.LEADING,
TokenType.LEFT,
TokenType.LOCAL,
TokenType.MATERIALIZED,
TokenType.MERGE,
@@ -208,6 +209,7 @@ class Parser(metaclass=_Parser):
TokenType.PRECEDING,
TokenType.RANGE,
TokenType.REFERENCES,
TokenType.RIGHT,
TokenType.ROW,
TokenType.ROWS,
TokenType.SCHEMA,
@@ -237,8 +239,10 @@ class Parser(metaclass=_Parser):
TABLE_ALIAS_TOKENS = ID_VAR_TOKENS - {
TokenType.APPLY,
TokenType.LEFT,
TokenType.NATURAL,
TokenType.OFFSET,
TokenType.RIGHT,
TokenType.WINDOW,
}
@@ -258,6 +262,8 @@ class Parser(metaclass=_Parser):
TokenType.IDENTIFIER,
TokenType.INDEX,
TokenType.ISNULL,
TokenType.ILIKE,
TokenType.LIKE,
TokenType.MERGE,
TokenType.OFFSET,
TokenType.PRIMARY_KEY,
@@ -971,13 +977,14 @@ class Parser(metaclass=_Parser):
if create_token.token_type in (TokenType.FUNCTION, TokenType.PROCEDURE):
this = self._parse_user_defined_function(kind=create_token.token_type)
properties = self._parse_properties()
if self._match(TokenType.ALIAS):
begin = self._match(TokenType.BEGIN)
return_ = self._match_text_seq("RETURN")
expression = self._parse_statement()
if return_:
expression = self.expression(exp.Return, this=expression)
self._match(TokenType.ALIAS)
begin = self._match(TokenType.BEGIN)
return_ = self._match_text_seq("RETURN")
expression = self._parse_statement()
if return_:
expression = self.expression(exp.Return, this=expression)
elif create_token.token_type == TokenType.INDEX:
this = self._parse_index()
elif create_token.token_type in (
@@ -2163,7 +2170,9 @@ class Parser(metaclass=_Parser):
) -> t.Optional[exp.Expression]:
if self._match(TokenType.TOP if top else TokenType.LIMIT):
limit_paren = self._match(TokenType.L_PAREN)
limit_exp = self.expression(exp.Limit, this=this, expression=self._parse_number())
limit_exp = self.expression(
exp.Limit, this=this, expression=self._parse_number() if top else self._parse_term()
)
if limit_paren:
self._match_r_paren()
@@ -2740,8 +2749,23 @@ class Parser(metaclass=_Parser):
kind: exp.Expression
if self._match(TokenType.AUTO_INCREMENT):
kind = exp.AutoIncrementColumnConstraint()
if self._match_set((TokenType.AUTO_INCREMENT, TokenType.IDENTITY)):
start = None
increment = None
if self._match(TokenType.L_PAREN, advance=False):
args = self._parse_wrapped_csv(self._parse_bitwise)
start = seq_get(args, 0)
increment = seq_get(args, 1)
elif self._match_text_seq("START"):
start = self._parse_bitwise()
self._match_text_seq("INCREMENT")
increment = self._parse_bitwise()
if start and increment:
kind = exp.GeneratedAsIdentityColumnConstraint(start=start, increment=increment)
else:
kind = exp.AutoIncrementColumnConstraint()
elif self._match(TokenType.CHECK):
constraint = self._parse_wrapped(self._parse_conjunction)
kind = self.expression(exp.CheckColumnConstraint, this=constraint)
@@ -3294,8 +3318,8 @@ class Parser(metaclass=_Parser):
if not self._match(TokenType.EXCEPT):
return None
if self._match(TokenType.L_PAREN, advance=False):
return self._parse_wrapped_id_vars()
return self._parse_csv(self._parse_id_var)
return self._parse_wrapped_csv(self._parse_column)
return self._parse_csv(self._parse_column)
def _parse_replace(self) -> t.Optional[t.List[t.Optional[exp.Expression]]]:
if not self._match(TokenType.REPLACE):
@@ -3442,7 +3466,7 @@ class Parser(metaclass=_Parser):
def _parse_alter(self) -> t.Optional[exp.Expression]:
if not self._match(TokenType.TABLE):
return None
return self._parse_as_command(self._prev)
exists = self._parse_exists()
this = self._parse_table(schema=True)