commit efc1e37108 (parent 407314e8d2)
Author: Daniel Baumann <daniel@debian.org>
Date:   2025-02-13 14:54:32 +01:00
Signed by: daniel (GPG key ID: FBB4F0E80A80222F)

    Merging upstream version 10.0.8.

    Signed-off-by: Daniel Baumann <daniel@debian.org>

67 changed files with 2461 additions and 840 deletions

diff --git a/sqlglot/tokens.py b/sqlglot/tokens.py
--- a/sqlglot/tokens.py
+++ b/sqlglot/tokens.py
@@ -105,12 +105,9 @@ class TokenType(AutoName):
     OBJECT = auto()
 
     # keywords
-    ADD_FILE = auto()
     ALIAS = auto()
     ALWAYS = auto()
     ALL = auto()
-    ALTER = auto()
-    ANALYZE = auto()
     ANTI = auto()
     ANY = auto()
     APPLY = auto()
@@ -124,14 +121,14 @@ class TokenType(AutoName):
     BUCKET = auto()
     BY_DEFAULT = auto()
     CACHE = auto()
-    CALL = auto()
+    CASCADE = auto()
     CASE = auto()
     CHARACTER_SET = auto()
     CHECK = auto()
     CLUSTER_BY = auto()
     COLLATE = auto()
     COMMAND = auto()
-    COMMENT_ON = auto()
+    COMMENT = auto()
     COMMIT = auto()
     CONSTRAINT = auto()
     CREATE = auto()
@@ -149,7 +146,9 @@ class TokenType(AutoName):
     DETERMINISTIC = auto()
     DISTINCT = auto()
     DISTINCT_FROM = auto()
+    DISTKEY = auto()
     DISTRIBUTE_BY = auto()
+    DISTSTYLE = auto()
     DIV = auto()
     DROP = auto()
     ELSE = auto()
@@ -159,7 +158,6 @@ class TokenType(AutoName):
     EXCEPT = auto()
     EXECUTE = auto()
     EXISTS = auto()
-    EXPLAIN = auto()
     FALSE = auto()
     FETCH = auto()
     FILTER = auto()
@@ -216,7 +214,6 @@ class TokenType(AutoName):
     OFFSET = auto()
     ON = auto()
     ONLY = auto()
-    OPTIMIZE = auto()
     OPTIONS = auto()
     ORDER_BY = auto()
     ORDERED = auto()
@@ -258,6 +255,7 @@ class TokenType(AutoName):
     SHOW = auto()
     SIMILAR_TO = auto()
     SOME = auto()
+    SORTKEY = auto()
     SORT_BY = auto()
     STABLE = auto()
     STORED = auto()
@@ -268,9 +266,8 @@ class TokenType(AutoName):
     TRANSIENT = auto()
     TOP = auto()
     THEN = auto()
+    TRUE = auto()
     TRAILING = auto()
-    TRUNCATE = auto()
-    TRUE = auto()
     UNBOUNDED = auto()
     UNCACHE = auto()
     UNION = auto()
@@ -280,7 +277,6 @@ class TokenType(AutoName):
     USE = auto()
     USING = auto()
     VALUES = auto()
-    VACUUM = auto()
     VIEW = auto()
     VOLATILE = auto()
     WHEN = auto()
@@ -420,7 +416,6 @@ class Tokenizer(metaclass=_Tokenizer):
 
     KEYWORDS = {
         "/*+": TokenType.HINT,
-        "*/": TokenType.HINT,
         "==": TokenType.EQ,
         "::": TokenType.DCOLON,
         "||": TokenType.DPIPE,
@@ -435,15 +430,7 @@ class Tokenizer(metaclass=_Tokenizer):
         "#>": TokenType.HASH_ARROW,
         "#>>": TokenType.DHASH_ARROW,
         "<->": TokenType.LR_ARROW,
-        "ADD ARCHIVE": TokenType.ADD_FILE,
-        "ADD ARCHIVES": TokenType.ADD_FILE,
-        "ADD FILE": TokenType.ADD_FILE,
-        "ADD FILES": TokenType.ADD_FILE,
-        "ADD JAR": TokenType.ADD_FILE,
-        "ADD JARS": TokenType.ADD_FILE,
         "ALL": TokenType.ALL,
-        "ALTER": TokenType.ALTER,
-        "ANALYZE": TokenType.ANALYZE,
         "AND": TokenType.AND,
         "ANTI": TokenType.ANTI,
         "ANY": TokenType.ANY,
@@ -455,10 +442,10 @@ class Tokenizer(metaclass=_Tokenizer):
         "BETWEEN": TokenType.BETWEEN,
         "BOTH": TokenType.BOTH,
         "BUCKET": TokenType.BUCKET,
-        "CALL": TokenType.CALL,
         "CACHE": TokenType.CACHE,
         "UNCACHE": TokenType.UNCACHE,
         "CASE": TokenType.CASE,
+        "CASCADE": TokenType.CASCADE,
         "CHARACTER SET": TokenType.CHARACTER_SET,
         "CHECK": TokenType.CHECK,
         "CLUSTER BY": TokenType.CLUSTER_BY,
@@ -479,7 +466,9 @@ class Tokenizer(metaclass=_Tokenizer):
         "DETERMINISTIC": TokenType.DETERMINISTIC,
         "DISTINCT": TokenType.DISTINCT,
         "DISTINCT FROM": TokenType.DISTINCT_FROM,
+        "DISTKEY": TokenType.DISTKEY,
         "DISTRIBUTE BY": TokenType.DISTRIBUTE_BY,
+        "DISTSTYLE": TokenType.DISTSTYLE,
         "DIV": TokenType.DIV,
         "DROP": TokenType.DROP,
         "ELSE": TokenType.ELSE,
@@ -489,7 +478,6 @@ class Tokenizer(metaclass=_Tokenizer):
         "EXCEPT": TokenType.EXCEPT,
         "EXECUTE": TokenType.EXECUTE,
         "EXISTS": TokenType.EXISTS,
-        "EXPLAIN": TokenType.EXPLAIN,
         "FALSE": TokenType.FALSE,
         "FETCH": TokenType.FETCH,
         "FILTER": TokenType.FILTER,
@@ -541,7 +529,6 @@ class Tokenizer(metaclass=_Tokenizer):
         "OFFSET": TokenType.OFFSET,
         "ON": TokenType.ON,
         "ONLY": TokenType.ONLY,
-        "OPTIMIZE": TokenType.OPTIMIZE,
         "OPTIONS": TokenType.OPTIONS,
         "OR": TokenType.OR,
         "ORDER BY": TokenType.ORDER_BY,
@@ -579,6 +566,7 @@ class Tokenizer(metaclass=_Tokenizer):
         "SET": TokenType.SET,
         "SHOW": TokenType.SHOW,
         "SOME": TokenType.SOME,
+        "SORTKEY": TokenType.SORTKEY,
         "SORT BY": TokenType.SORT_BY,
         "STABLE": TokenType.STABLE,
         "STORED": TokenType.STORED,
@@ -592,7 +580,6 @@ class Tokenizer(metaclass=_Tokenizer):
         "THEN": TokenType.THEN,
         "TRUE": TokenType.TRUE,
         "TRAILING": TokenType.TRAILING,
-        "TRUNCATE": TokenType.TRUNCATE,
         "UNBOUNDED": TokenType.UNBOUNDED,
         "UNION": TokenType.UNION,
         "UNPIVOT": TokenType.UNPIVOT,
@@ -600,7 +587,6 @@ class Tokenizer(metaclass=_Tokenizer):
         "UPDATE": TokenType.UPDATE,
         "USE": TokenType.USE,
         "USING": TokenType.USING,
-        "VACUUM": TokenType.VACUUM,
         "VALUES": TokenType.VALUES,
         "VIEW": TokenType.VIEW,
         "VOLATILE": TokenType.VOLATILE,
@@ -659,6 +645,14 @@ class Tokenizer(metaclass=_Tokenizer):
         "UNIQUE": TokenType.UNIQUE,
         "STRUCT": TokenType.STRUCT,
         "VARIANT": TokenType.VARIANT,
+        "ALTER": TokenType.COMMAND,
+        "ANALYZE": TokenType.COMMAND,
+        "CALL": TokenType.COMMAND,
+        "EXPLAIN": TokenType.COMMAND,
+        "OPTIMIZE": TokenType.COMMAND,
+        "PREPARE": TokenType.COMMAND,
+        "TRUNCATE": TokenType.COMMAND,
+        "VACUUM": TokenType.COMMAND,
     }
 
     WHITE_SPACE = {
@@ -670,20 +664,11 @@ class Tokenizer(metaclass=_Tokenizer):
     }
 
     COMMANDS = {
-        TokenType.ALTER,
-        TokenType.ADD_FILE,
-        TokenType.ANALYZE,
         TokenType.BEGIN,
-        TokenType.CALL,
-        TokenType.COMMENT_ON,
         TokenType.COMMIT,
-        TokenType.EXPLAIN,
-        TokenType.OPTIMIZE,
+        TokenType.COMMAND,
         TokenType.EXECUTE,
         TokenType.FETCH,
         TokenType.SET,
         TokenType.SHOW,
-        TokenType.TRUNCATE,
-        TokenType.VACUUM,
         TokenType.ROLLBACK,
     }
 
     # handle numeric literals like in hive (3L = BIGINT)
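Together with the KEYWORDS hunk above, this is the core of the change: statements that previously carried dedicated token types (EXPLAIN, VACUUM, OPTIMIZE, ALTER, ANALYZE, CALL, TRUNCATE, and the Hive ADD FILE variants) now all tokenize to the single generic TokenType.COMMAND, so the COMMANDS set needs only that one entry. A hedged sketch of the effect, assuming sqlglot 10.0.8 is installed; my_table is just a placeholder name:

from sqlglot.tokens import Tokenizer, TokenType

# Every consolidated statement now begins with the one generic COMMAND token.
for sql in ("EXPLAIN SELECT 1", "VACUUM my_table", "OPTIMIZE my_table"):
    first_token = Tokenizer().tokenize(sql)[0]
    assert first_token.token_type == TokenType.COMMAND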
@@ -885,6 +870,7 @@ class Tokenizer(metaclass=_Tokenizer):
         if comment_start_line == self._prev_token_line:
             if self._prev_token_comment is None:
                 self.tokens[-1].comment = self._comment
+                self._prev_token_comment = self._comment
 
             self._comment = None
 
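The final hunk records the comment that was just attached in _prev_token_comment, so a second comment on the same line no longer attaches over the first. An illustrative check, assuming sqlglot 10.0.8 and that tokens expose the comment attribute assigned in the hunk above:

from sqlglot.tokens import Tokenizer

# Only the first same-line comment should end up attached to the last token.
tokens = Tokenizer().tokenize("SELECT 1 /* first */ /* second */")
print([(token.text, token.comment) for token in tokens])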