commit 2153103f81 (parent fe1b1057f7)
Author: Daniel Baumann <daniel@debian.org>
Date:   2025-02-13 15:08:15 +01:00

    Merging upstream version 10.6.0.

    Signed-off-by: Daniel Baumann <daniel@debian.org>

Signed by: daniel (GPG key ID: FBB4F0E80A80222F)
36 changed files with 1007 additions and 270 deletions

sqlglot/tokens.py

@@ -22,6 +22,7 @@ class TokenType(AutoName):
DCOLON = auto()
SEMICOLON = auto()
STAR = auto()
BACKSLASH = auto()
SLASH = auto()
LT = auto()
LTE = auto()
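
Note: the new BACKSLASH token type pairs with the "\\" entry added to the single-character token map further down. A minimal sketch of the effect, assuming sqlglot >= 10.6.0 and its public Tokenizer.tokenize API:

    from sqlglot.tokens import Tokenizer, TokenType

    # A bare backslash, which previously had no single-character mapping,
    # now tokenizes as its own BACKSLASH token.
    tokens = Tokenizer().tokenize(r"a \ b")
    print([t.token_type for t in tokens])
    assert tokens[1].token_type == TokenType.BACKSLASH
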
@@ -157,18 +158,14 @@ class TokenType(AutoName):
DELETE = auto()
DESC = auto()
DESCRIBE = auto()
DETERMINISTIC = auto()
DISTINCT = auto()
DISTINCT_FROM = auto()
DISTKEY = auto()
DISTRIBUTE_BY = auto()
DISTSTYLE = auto()
DIV = auto()
DROP = auto()
ELSE = auto()
ENCODE = auto()
END = auto()
ENGINE = auto()
ESCAPE = auto()
EXCEPT = auto()
EXECUTE = auto()
@@ -182,10 +179,11 @@ class TokenType(AutoName):
FOR = auto()
FOREIGN_KEY = auto()
FORMAT = auto()
FROM = auto()
FULL = auto()
FUNCTION = auto()
FROM = auto()
GENERATED = auto()
GLOB = auto()
GLOBAL = auto()
GROUP_BY = auto()
GROUPING_SETS = auto()
@@ -195,7 +193,6 @@ class TokenType(AutoName):
IF = auto()
IGNORE_NULLS = auto()
ILIKE = auto()
IMMUTABLE = auto()
IN = auto()
INDEX = auto()
INNER = auto()
@@ -217,8 +214,8 @@ class TokenType(AutoName):
LIMIT = auto()
LOAD_DATA = auto()
LOCAL = auto()
LOCATION = auto()
MAP = auto()
MATCH_RECOGNIZE = auto()
MATERIALIZED = auto()
MERGE = auto()
MOD = auto()
@@ -242,7 +239,6 @@ class TokenType(AutoName):
OVERWRITE = auto()
PARTITION = auto()
PARTITION_BY = auto()
PARTITIONED_BY = auto()
PERCENT = auto()
PIVOT = auto()
PLACEHOLDER = auto()
@@ -258,7 +254,6 @@ class TokenType(AutoName):
REPLACE = auto()
RESPECT_NULLS = auto()
REFERENCES = auto()
RETURNS = auto()
RIGHT = auto()
RLIKE = auto()
ROLLBACK = auto()
@@ -277,10 +272,7 @@ class TokenType(AutoName):
SOME = auto()
SORTKEY = auto()
SORT_BY = auto()
STABLE = auto()
STORED = auto()
STRUCT = auto()
TABLE_FORMAT = auto()
TABLE_SAMPLE = auto()
TEMPORARY = auto()
TOP = auto()
@@ -414,6 +406,7 @@ class Tokenizer(metaclass=_Tokenizer):
"+": TokenType.PLUS,
";": TokenType.SEMICOLON,
"/": TokenType.SLASH,
"\\": TokenType.BACKSLASH,
"*": TokenType.STAR,
"~": TokenType.TILDA,
"?": TokenType.PLACEHOLDER,
@@ -448,9 +441,11 @@ class Tokenizer(metaclass=_Tokenizer):
},
**{
f"{prefix}{key}": TokenType.BLOCK_END
for key in ("}}", "%}", "#}")
for key in ("%}", "#}")
for prefix in ("", "+", "-")
},
"+}}": TokenType.BLOCK_END,
"-}}": TokenType.BLOCK_END,
"/*+": TokenType.HINT,
"==": TokenType.EQ,
"::": TokenType.DCOLON,
@@ -503,17 +498,13 @@ class Tokenizer(metaclass=_Tokenizer):
"DELETE": TokenType.DELETE,
"DESC": TokenType.DESC,
"DESCRIBE": TokenType.DESCRIBE,
"DETERMINISTIC": TokenType.DETERMINISTIC,
"DISTINCT": TokenType.DISTINCT,
"DISTINCT FROM": TokenType.DISTINCT_FROM,
"DISTKEY": TokenType.DISTKEY,
"DISTRIBUTE BY": TokenType.DISTRIBUTE_BY,
"DISTSTYLE": TokenType.DISTSTYLE,
"DIV": TokenType.DIV,
"DROP": TokenType.DROP,
"ELSE": TokenType.ELSE,
"END": TokenType.END,
"ENGINE": TokenType.ENGINE,
"ESCAPE": TokenType.ESCAPE,
"EXCEPT": TokenType.EXCEPT,
"EXECUTE": TokenType.EXECUTE,
@@ -530,13 +521,13 @@ class Tokenizer(metaclass=_Tokenizer):
"FORMAT": TokenType.FORMAT,
"FROM": TokenType.FROM,
"GENERATED": TokenType.GENERATED,
"GLOB": TokenType.GLOB,
"GROUP BY": TokenType.GROUP_BY,
"GROUPING SETS": TokenType.GROUPING_SETS,
"HAVING": TokenType.HAVING,
"IDENTITY": TokenType.IDENTITY,
"IF": TokenType.IF,
"ILIKE": TokenType.ILIKE,
"IMMUTABLE": TokenType.IMMUTABLE,
"IGNORE NULLS": TokenType.IGNORE_NULLS,
"IN": TokenType.IN,
"INDEX": TokenType.INDEX,
@@ -548,7 +539,6 @@ class Tokenizer(metaclass=_Tokenizer):
"IS": TokenType.IS,
"ISNULL": TokenType.ISNULL,
"JOIN": TokenType.JOIN,
"LANGUAGE": TokenType.LANGUAGE,
"LATERAL": TokenType.LATERAL,
"LAZY": TokenType.LAZY,
"LEADING": TokenType.LEADING,
@@ -557,7 +547,6 @@ class Tokenizer(metaclass=_Tokenizer):
"LIMIT": TokenType.LIMIT,
"LOAD DATA": TokenType.LOAD_DATA,
"LOCAL": TokenType.LOCAL,
"LOCATION": TokenType.LOCATION,
"MATERIALIZED": TokenType.MATERIALIZED,
"MERGE": TokenType.MERGE,
"NATURAL": TokenType.NATURAL,
@@ -582,8 +571,8 @@ class Tokenizer(metaclass=_Tokenizer):
"OVERWRITE": TokenType.OVERWRITE,
"PARTITION": TokenType.PARTITION,
"PARTITION BY": TokenType.PARTITION_BY,
"PARTITIONED BY": TokenType.PARTITIONED_BY,
"PARTITIONED_BY": TokenType.PARTITIONED_BY,
"PARTITIONED BY": TokenType.PARTITION_BY,
"PARTITIONED_BY": TokenType.PARTITION_BY,
"PERCENT": TokenType.PERCENT,
"PIVOT": TokenType.PIVOT,
"PRECEDING": TokenType.PRECEDING,
@@ -596,7 +585,6 @@ class Tokenizer(metaclass=_Tokenizer):
"REPLACE": TokenType.REPLACE,
"RESPECT NULLS": TokenType.RESPECT_NULLS,
"REFERENCES": TokenType.REFERENCES,
"RETURNS": TokenType.RETURNS,
"RIGHT": TokenType.RIGHT,
"RLIKE": TokenType.RLIKE,
"ROLLBACK": TokenType.ROLLBACK,
@@ -613,11 +601,7 @@ class Tokenizer(metaclass=_Tokenizer):
"SOME": TokenType.SOME,
"SORTKEY": TokenType.SORTKEY,
"SORT BY": TokenType.SORT_BY,
"STABLE": TokenType.STABLE,
"STORED": TokenType.STORED,
"TABLE": TokenType.TABLE,
"TABLE_FORMAT": TokenType.TABLE_FORMAT,
"TBLPROPERTIES": TokenType.PROPERTIES,
"TABLESAMPLE": TokenType.TABLE_SAMPLE,
"TEMP": TokenType.TEMPORARY,
"TEMPORARY": TokenType.TEMPORARY,