1
0
Fork 0

Merging upstream version 11.2.3.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 15:40:23 +01:00
parent c6f7c6bbe1
commit 428b7dd76f
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
93 changed files with 33054 additions and 31671 deletions

View file

@ -144,6 +144,7 @@ class Parser(metaclass=_Parser):
TokenType.IMAGE,
TokenType.VARIANT,
TokenType.OBJECT,
TokenType.INET,
*NESTED_TYPE_TOKENS,
}
@ -509,73 +510,82 @@ class Parser(metaclass=_Parser):
}
# Maps an already-matched property keyword to the callback that parses the
# rest of that property clause. Keys are kept in alphabetical order and must
# be unique — the stripped diff rendering had duplicated several entries,
# which Python would silently collapse (last occurrence wins); deduplicated
# here so each keyword maps to exactly one parser.
PROPERTY_PARSERS = {
    "AFTER": lambda self: self._parse_afterjournal(
        no=self._prev.text.upper() == "NO", dual=self._prev.text.upper() == "DUAL"
    ),
    "ALGORITHM": lambda self: self._parse_property_assignment(exp.AlgorithmProperty),
    "AUTO_INCREMENT": lambda self: self._parse_property_assignment(exp.AutoIncrementProperty),
    "BEFORE": lambda self: self._parse_journal(
        no=self._prev.text.upper() == "NO", dual=self._prev.text.upper() == "DUAL"
    ),
    "BLOCKCOMPRESSION": lambda self: self._parse_blockcompression(),
    "CHARACTER SET": lambda self: self._parse_character_set(),
    "CHECKSUM": lambda self: self._parse_checksum(),
    "CLUSTER BY": lambda self: self.expression(
        exp.Cluster, expressions=self._parse_csv(self._parse_ordered)
    ),
    "COLLATE": lambda self: self._parse_property_assignment(exp.CollateProperty),
    "COMMENT": lambda self: self._parse_property_assignment(exp.SchemaCommentProperty),
    "DATABLOCKSIZE": lambda self: self._parse_datablocksize(
        default=self._prev.text.upper() == "DEFAULT"
    ),
    "DEFINER": lambda self: self._parse_definer(),
    "DETERMINISTIC": lambda self: self.expression(
        exp.VolatilityProperty, this=exp.Literal.string("IMMUTABLE")
    ),
    "DISTKEY": lambda self: self._parse_distkey(),
    "DISTSTYLE": lambda self: self._parse_property_assignment(exp.DistStyleProperty),
    "EXECUTE": lambda self: self._parse_property_assignment(exp.ExecuteAsProperty),
    "EXTERNAL": lambda self: self.expression(exp.ExternalProperty),
    "FALLBACK": lambda self: self._parse_fallback(no=self._prev.text.upper() == "NO"),
    "FORMAT": lambda self: self._parse_property_assignment(exp.FileFormatProperty),
    "FREESPACE": lambda self: self._parse_freespace(),
    "GLOBAL": lambda self: self._parse_temporary(global_=True),
    "IMMUTABLE": lambda self: self.expression(
        exp.VolatilityProperty, this=exp.Literal.string("IMMUTABLE")
    ),
    "JOURNAL": lambda self: self._parse_journal(
        no=self._prev.text.upper() == "NO", dual=self._prev.text.upper() == "DUAL"
    ),
    "LANGUAGE": lambda self: self._parse_property_assignment(exp.LanguageProperty),
    "LIKE": lambda self: self._parse_create_like(),
    "LOCAL": lambda self: self._parse_afterjournal(no=False, dual=False, local=True),
    "LOCATION": lambda self: self._parse_property_assignment(exp.LocationProperty),
    "LOCK": lambda self: self._parse_locking(),
    "LOCKING": lambda self: self._parse_locking(),
    "LOG": lambda self: self._parse_log(no=self._prev.text.upper() == "NO"),
    "MATERIALIZED": lambda self: self.expression(exp.MaterializedProperty),
    "MAX": lambda self: self._parse_datablocksize(),
    "MAXIMUM": lambda self: self._parse_datablocksize(),
    "MERGEBLOCKRATIO": lambda self: self._parse_mergeblockratio(
        no=self._prev.text.upper() == "NO", default=self._prev.text.upper() == "DEFAULT"
    ),
    "MIN": lambda self: self._parse_datablocksize(),
    "MINIMUM": lambda self: self._parse_datablocksize(),
    "MULTISET": lambda self: self.expression(exp.SetProperty, multi=True),
    "NO": lambda self: self._parse_noprimaryindex(),
    "NOT": lambda self: self._parse_afterjournal(no=False, dual=False, local=False),
    "ON": lambda self: self._parse_oncommit(),
    "PARTITION BY": lambda self: self._parse_partitioned_by(),
    "PARTITIONED BY": lambda self: self._parse_partitioned_by(),
    "PARTITIONED_BY": lambda self: self._parse_partitioned_by(),
    "RETURNS": lambda self: self._parse_returns(),
    "ROW": lambda self: self._parse_row(),
    "SET": lambda self: self.expression(exp.SetProperty, multi=False),
    "SORTKEY": lambda self: self._parse_sortkey(),
    "STABLE": lambda self: self.expression(
        exp.VolatilityProperty, this=exp.Literal.string("STABLE")
    ),
    "STORED": lambda self: self._parse_property_assignment(exp.FileFormatProperty),
    "TABLE_FORMAT": lambda self: self._parse_property_assignment(exp.TableFormatProperty),
    "TBLPROPERTIES": lambda self: self._parse_wrapped_csv(self._parse_property),
    "TEMPORARY": lambda self: self._parse_temporary(global_=False),
    "TRANSIENT": lambda self: self.expression(exp.TransientProperty),
    "USING": lambda self: self._parse_property_assignment(exp.TableFormatProperty),
    "VOLATILE": lambda self: self.expression(
        exp.VolatilityProperty, this=exp.Literal.string("VOLATILE")
    ),
    "WITH": lambda self: self._parse_with_property(),
}
CONSTRAINT_PARSERS = {
@ -979,15 +989,7 @@ class Parser(metaclass=_Parser):
replace = self._prev.text.upper() == "REPLACE" or self._match_pair(
TokenType.OR, TokenType.REPLACE
)
set_ = self._match(TokenType.SET) # Teradata
multiset = self._match_text_seq("MULTISET") # Teradata
global_temporary = self._match_text_seq("GLOBAL", "TEMPORARY") # Teradata
volatile = self._match(TokenType.VOLATILE) # Teradata
temporary = self._match(TokenType.TEMPORARY)
transient = self._match_text_seq("TRANSIENT")
external = self._match_text_seq("EXTERNAL")
unique = self._match(TokenType.UNIQUE)
materialized = self._match(TokenType.MATERIALIZED)
if self._match_pair(TokenType.TABLE, TokenType.FUNCTION, advance=False):
self._match(TokenType.TABLE)
@ -1005,16 +1007,17 @@ class Parser(metaclass=_Parser):
exists = self._parse_exists(not_=True)
this = None
expression = None
data = None
statistics = None
no_primary_index = None
indexes = None
no_schema_binding = None
begin = None
if create_token.token_type in (TokenType.FUNCTION, TokenType.PROCEDURE):
this = self._parse_user_defined_function(kind=create_token.token_type)
properties = self._parse_properties()
temp_properties = self._parse_properties()
if properties and temp_properties:
properties.expressions.extend(temp_properties.expressions)
elif temp_properties:
properties = temp_properties
self._match(TokenType.ALIAS)
begin = self._match(TokenType.BEGIN)
@ -1036,7 +1039,7 @@ class Parser(metaclass=_Parser):
if self._match(TokenType.COMMA):
temp_properties = self._parse_properties(before=True)
if properties and temp_properties:
properties.expressions.append(temp_properties.expressions)
properties.expressions.extend(temp_properties.expressions)
elif temp_properties:
properties = temp_properties
@ -1045,7 +1048,7 @@ class Parser(metaclass=_Parser):
# exp.Properties.Location.POST_SCHEMA and POST_WITH
temp_properties = self._parse_properties()
if properties and temp_properties:
properties.expressions.append(temp_properties.expressions)
properties.expressions.extend(temp_properties.expressions)
elif temp_properties:
properties = temp_properties
@ -1059,24 +1062,19 @@ class Parser(metaclass=_Parser):
):
temp_properties = self._parse_properties()
if properties and temp_properties:
properties.expressions.append(temp_properties.expressions)
properties.expressions.extend(temp_properties.expressions)
elif temp_properties:
properties = temp_properties
expression = self._parse_ddl_select()
if create_token.token_type == TokenType.TABLE:
if self._match_text_seq("WITH", "DATA"):
data = True
elif self._match_text_seq("WITH", "NO", "DATA"):
data = False
if self._match_text_seq("AND", "STATISTICS"):
statistics = True
elif self._match_text_seq("AND", "NO", "STATISTICS"):
statistics = False
no_primary_index = self._match_text_seq("NO", "PRIMARY", "INDEX")
# exp.Properties.Location.POST_EXPRESSION
temp_properties = self._parse_properties()
if properties and temp_properties:
properties.expressions.extend(temp_properties.expressions)
elif temp_properties:
properties = temp_properties
indexes = []
while True:
@ -1086,7 +1084,7 @@ class Parser(metaclass=_Parser):
if self._match(TokenType.PARTITION_BY, advance=False):
temp_properties = self._parse_properties()
if properties and temp_properties:
properties.expressions.append(temp_properties.expressions)
properties.expressions.extend(temp_properties.expressions)
elif temp_properties:
properties = temp_properties
@ -1102,22 +1100,11 @@ class Parser(metaclass=_Parser):
exp.Create,
this=this,
kind=create_token.text,
unique=unique,
expression=expression,
set=set_,
multiset=multiset,
global_temporary=global_temporary,
volatile=volatile,
exists=exists,
properties=properties,
temporary=temporary,
transient=transient,
external=external,
replace=replace,
unique=unique,
materialized=materialized,
data=data,
statistics=statistics,
no_primary_index=no_primary_index,
indexes=indexes,
no_schema_binding=no_schema_binding,
begin=begin,
@ -1196,15 +1183,21 @@ class Parser(metaclass=_Parser):
def _parse_with_property(
    self,
) -> t.Union[t.Optional[exp.Expression], t.List[t.Optional[exp.Expression]]]:
    """Parse a WITH-prefixed table property.

    Dispatches on what follows WITH: a parenthesized property list,
    WITH JOURNAL TABLE, WITH [NO] DATA (Teradata), or WITH [NO]
    [CONCURRENT] ISOLATED LOADING. Returns None when nothing follows.

    NOTE(review): the stripped diff showed both the old peek-based
    JOURNAL dispatch and the new _match_text_seq form; this keeps the
    post-merge text-sequence form.
    """
    self._match(TokenType.WITH)

    # WITH (prop = value, ...) — a wrapped property list.
    if self._match(TokenType.L_PAREN, advance=False):
        return self._parse_wrapped_csv(self._parse_property)

    if self._match_text_seq("JOURNAL"):
        return self._parse_withjournaltable()

    if self._match_text_seq("DATA"):
        return self._parse_withdata(no=False)
    elif self._match_text_seq("NO", "DATA"):
        return self._parse_withdata(no=True)

    if not self._next:
        return None

    return self._parse_withisolatedloading()
# https://dev.mysql.com/doc/refman/8.0/en/create-view.html
@ -1221,7 +1214,7 @@ class Parser(metaclass=_Parser):
return exp.DefinerProperty(this=f"{user}@{host}")
def _parse_withjournaltable(self) -> exp.Expression:
    """Parse "[WITH JOURNAL] TABLE = <table>" into a WithJournalTableProperty.

    The WITH and JOURNAL keywords are consumed by the caller
    (_parse_with_property); the stripped diff had spliced the removed
    _match_text_seq("WITH", "JOURNAL", "TABLE") line next to its
    replacement — only the TokenType.TABLE match belongs here.
    """
    self._match(TokenType.TABLE)
    self._match(TokenType.EQ)
    return self.expression(exp.WithJournalTableProperty, this=self._parse_table_parts())
@ -1319,7 +1312,6 @@ class Parser(metaclass=_Parser):
)
def _parse_withisolatedloading(self) -> exp.Expression:
self._match(TokenType.WITH)
no = self._match_text_seq("NO")
concurrent = self._match_text_seq("CONCURRENT")
self._match_text_seq("ISOLATED", "LOADING")
@ -1397,6 +1389,24 @@ class Parser(metaclass=_Parser):
this=self._parse_schema() or self._parse_bracket(self._parse_field()),
)
def _parse_withdata(self, no=False) -> exp.Expression:
    """Build a WithDataProperty for a WITH [NO] DATA clause (Teradata).

    `no` reports whether the caller saw "NO DATA". An optional trailing
    "AND [NO] STATISTICS" clause sets statistics to True/False; it stays
    None when the clause is absent.
    """
    statistics = None
    if self._match_text_seq("AND", "STATISTICS"):
        statistics = True
    elif self._match_text_seq("AND", "NO", "STATISTICS"):
        statistics = False
    return self.expression(exp.WithDataProperty, no=no, statistics=statistics)
def _parse_noprimaryindex(self) -> exp.Expression:
    """Parse a NO PRIMARY INDEX clause (the leading NO keyword was
    already consumed by the property dispatcher)."""
    self._match_text_seq("PRIMARY", "INDEX")
    prop = exp.NoPrimaryIndexProperty()
    return prop
def _parse_oncommit(self) -> exp.Expression:
    """Parse an ON COMMIT PRESERVE ROWS clause (the ON keyword was
    already consumed by the property dispatcher)."""
    self._match_text_seq("COMMIT", "PRESERVE", "ROWS")
    prop = exp.OnCommitProperty()
    return prop
def _parse_distkey(self) -> exp.Expression:
    """Parse a parenthesized identifier into a DistKeyProperty."""
    key = self._parse_wrapped(self._parse_id_var)
    return self.expression(exp.DistKeyProperty, this=key)
@ -1450,6 +1460,10 @@ class Parser(metaclass=_Parser):
return self.expression(exp.ReturnsProperty, this=value, is_table=is_table)
def _parse_temporary(self, global_=False) -> exp.Expression:
    """Build a TemporaryProperty; global_ marks GLOBAL TEMPORARY.

    When dispatched from the "GLOBAL" keyword the TEMPORARY token is
    still pending, so consume it here if present.
    """
    self._match(TokenType.TEMPORARY)
    return self.expression(exp.TemporaryProperty, global_=global_)
def _parse_describe(self) -> exp.Expression:
kind = self._match_set(self.CREATABLES) and self._prev.text
this = self._parse_table()
@ -2042,6 +2056,9 @@ class Parser(metaclass=_Parser):
if alias:
this.set("alias", alias)
if not this.args.get("pivots"):
this.set("pivots", self._parse_pivots())
if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
this.set(
"hints",
@ -2182,7 +2199,12 @@ class Parser(metaclass=_Parser):
self._match_r_paren()
return self.expression(exp.Pivot, expressions=expressions, field=field, unpivot=unpivot)
pivot = self.expression(exp.Pivot, expressions=expressions, field=field, unpivot=unpivot)
if not self._match_set((TokenType.PIVOT, TokenType.UNPIVOT), advance=False):
pivot.set("alias", self._parse_table_alias())
return pivot
def _parse_where(self, skip_where_token: bool = False) -> t.Optional[exp.Expression]:
if not skip_where_token and not self._match(TokenType.WHERE):
@ -3783,12 +3805,13 @@ class Parser(metaclass=_Parser):
return None
def _match_set(self, types, advance=True):
    """Return True if the current token's type is in `types`.

    By default the matched token is consumed; pass advance=False to
    peek without consuming (backward compatible — existing callers keep
    the consuming behavior). Returns None (falsy) when there is no
    current token or it does not match.

    The stripped diff had spliced the old signature and the old
    unconditional self._advance() into this body; only the
    advance-guarded form belongs here.
    """
    if not self._curr:
        return None

    if self._curr.token_type in types:
        if advance:
            self._advance()
        return True

    return None
@ -3816,9 +3839,10 @@ class Parser(metaclass=_Parser):
if expression and self._prev_comments:
expression.comments = self._prev_comments
def _match_texts(self, texts, advance=True):
    """Return True if the current token's upper-cased text is in `texts`.

    By default the matched token is consumed; pass advance=False to
    peek without consuming (backward compatible default). Returns False
    when there is no current token or its text does not match.

    The stripped diff had spliced the old signature and the old
    unconditional self._advance() into this body; only the
    advance-guarded form belongs here.
    """
    if self._curr and self._curr.text.upper() in texts:
        if advance:
            self._advance()
        return True

    return False