1
0
Fork 0

Merging upstream version 18.5.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 21:02:36 +01:00
parent ad94fdbf21
commit 11b24b93ea
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
67 changed files with 32690 additions and 32450 deletions

View file

@@ -820,7 +820,9 @@ class Parser(metaclass=_Parser):
SHOW_PARSERS: t.Dict[str, t.Callable] = {}
TYPE_LITERAL_PARSERS: t.Dict[exp.DataType.Type, t.Callable] = {}
TYPE_LITERAL_PARSERS = {
exp.DataType.Type.JSON: lambda self, this, _: self.expression(exp.ParseJSON, this=this),
}
MODIFIABLES = (exp.Subquery, exp.Subqueryable, exp.Table)
@@ -848,6 +850,8 @@ class Parser(metaclass=_Parser):
WINDOW_BEFORE_PAREN_TOKENS = {TokenType.OVER}
WINDOW_SIDES = {"FOLLOWING", "PRECEDING"}
FETCH_TOKENS = ID_VAR_TOKENS - {TokenType.ROW, TokenType.ROWS, TokenType.PERCENT}
ADD_CONSTRAINT_TOKENS = {TokenType.CONSTRAINT, TokenType.PRIMARY_KEY, TokenType.FOREIGN_KEY}
DISTINCT_TOKENS = {TokenType.DISTINCT}
@@ -863,8 +867,6 @@ class Parser(metaclass=_Parser):
LOG_BASE_FIRST = True
LOG_DEFAULTS_TO_LN = False
SUPPORTS_USER_DEFINED_TYPES = True
# Whether or not ADD is present for each column added by ALTER TABLE
ALTER_TABLE_ADD_COLUMN_KEYWORD = True
@@ -892,6 +894,7 @@ class Parser(metaclass=_Parser):
UNNEST_COLUMN_ONLY: bool = False
ALIAS_POST_TABLESAMPLE: bool = False
STRICT_STRING_CONCAT = False
SUPPORTS_USER_DEFINED_TYPES = True
NORMALIZE_FUNCTIONS = "upper"
NULL_ORDERING: str = "nulls_are_small"
SHOW_TRIE: t.Dict = {}
@@ -2692,7 +2695,7 @@ class Parser(metaclass=_Parser):
expressions = self._parse_csv(self._parse_primary)
else:
expressions = None
num = self._parse_number()
num = self._parse_primary()
if self._match_text_seq("BUCKET"):
bucket_numerator = self._parse_number()
@@ -2914,6 +2917,10 @@ class Parser(metaclass=_Parser):
)
connect = self._parse_conjunction()
self.NO_PAREN_FUNCTION_PARSERS.pop("PRIOR")
if not start and self._match(TokenType.START_WITH):
start = self._parse_conjunction()
return self.expression(exp.Connect, start=start, connect=connect)
def _parse_order(
@@ -2985,7 +2992,7 @@ class Parser(metaclass=_Parser):
direction = self._match_set((TokenType.FIRST, TokenType.NEXT))
direction = self._prev.text if direction else "FIRST"
count = self._parse_number()
count = self._parse_field(tokens=self.FETCH_TOKENS)
percent = self._match(TokenType.PERCENT)
self._match_set((TokenType.ROW, TokenType.ROWS))
@@ -3272,7 +3279,12 @@ class Parser(metaclass=_Parser):
if tokens[0].token_type in self.TYPE_TOKENS:
self._prev = tokens[0]
elif self.SUPPORTS_USER_DEFINED_TYPES:
return exp.DataType.build(identifier.name, udt=True)
type_name = identifier.name
while self._match(TokenType.DOT):
type_name = f"{type_name}.{self._advance_any() and self._prev.text}"
return exp.DataType.build(type_name, udt=True)
else:
return None
else:
@@ -3816,7 +3828,9 @@ class Parser(metaclass=_Parser):
def _parse_unique(self) -> exp.UniqueColumnConstraint:
self._match_text_seq("KEY")
return self.expression(
exp.UniqueColumnConstraint, this=self._parse_schema(self._parse_id_var(any_token=False))
exp.UniqueColumnConstraint,
this=self._parse_schema(self._parse_id_var(any_token=False)),
index_type=self._match(TokenType.USING) and self._advance_any() and self._prev.text,
)
def _parse_key_constraint_options(self) -> t.List[str]: