Merging upstream version 7.1.3.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent 964bd62de9
commit e6b3d2fe54
42 changed files with 1430 additions and 253 deletions
@@ -123,6 +123,7 @@ class TokenType(AutoName):
    CLUSTER_BY = auto()
    COLLATE = auto()
    COMMENT = auto()
    COMMENT_ON = auto()
    COMMIT = auto()
    CONSTRAINT = auto()
    CREATE = auto()

@@ -133,13 +134,14 @@ class TokenType(AutoName):
    CURRENT_ROW = auto()
    CURRENT_TIME = auto()
    CURRENT_TIMESTAMP = auto()
    DIV = auto()
    DEFAULT = auto()
    DELETE = auto()
    DESC = auto()
    DESCRIBE = auto()
    DETERMINISTIC = auto()
    DISTINCT = auto()
    DISTRIBUTE_BY = auto()
    DIV = auto()
    DROP = auto()
    ELSE = auto()
    END = auto()

@@ -189,6 +191,8 @@ class TokenType(AutoName):
    LEFT = auto()
    LIKE = auto()
    LIMIT = auto()
    LOAD_DATA = auto()
    LOCAL = auto()
    LOCATION = auto()
    MAP = auto()
    MATERIALIZED = auto()

@@ -196,6 +200,7 @@ class TokenType(AutoName):
    NATURAL = auto()
    NEXT = auto()
    NO_ACTION = auto()
    NOTNULL = auto()
    NULL = auto()
    NULLS_FIRST = auto()
    NULLS_LAST = auto()
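The hunk headers above show these members being added inside class TokenType(AutoName). The AutoName base itself is not part of this diff; as a minimal sketch only, it is typically the standard enum recipe in which each auto() member takes its own name as its value (the member names below are merely examples taken from the hunks):

from enum import Enum, auto

class AutoName(Enum):
    # Standard recipe: auto() members take their own name as their value.
    def _generate_next_value_(name, start, count, last_values):
        return name

class TokenType(AutoName):
    COMMENT_ON = auto()
    DETERMINISTIC = auto()

print(TokenType.COMMENT_ON.value)  # -> "COMMENT_ON"

With that recipe, each token type's value is simply its name as a string, which pairs naturally with keyword tables like the ones in the next hunks.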
@ -436,13 +441,14 @@ class Tokenizer(metaclass=_Tokenizer):
|
|||
"CURRENT_DATE": TokenType.CURRENT_DATE,
|
||||
"CURRENT ROW": TokenType.CURRENT_ROW,
|
||||
"CURRENT_TIMESTAMP": TokenType.CURRENT_TIMESTAMP,
|
||||
"DIV": TokenType.DIV,
|
||||
"DEFAULT": TokenType.DEFAULT,
|
||||
"DELETE": TokenType.DELETE,
|
||||
"DESC": TokenType.DESC,
|
||||
"DESCRIBE": TokenType.DESCRIBE,
|
||||
"DETERMINISTIC": TokenType.DETERMINISTIC,
|
||||
"DISTINCT": TokenType.DISTINCT,
|
||||
"DISTRIBUTE BY": TokenType.DISTRIBUTE_BY,
|
||||
"DIV": TokenType.DIV,
|
||||
"DROP": TokenType.DROP,
|
||||
"ELSE": TokenType.ELSE,
|
||||
"END": TokenType.END,
|
||||
|
@ -487,12 +493,15 @@ class Tokenizer(metaclass=_Tokenizer):
|
|||
"LEFT": TokenType.LEFT,
|
||||
"LIKE": TokenType.LIKE,
|
||||
"LIMIT": TokenType.LIMIT,
|
||||
"LOAD DATA": TokenType.LOAD_DATA,
|
||||
"LOCAL": TokenType.LOCAL,
|
||||
"LOCATION": TokenType.LOCATION,
|
||||
"MATERIALIZED": TokenType.MATERIALIZED,
|
||||
"NATURAL": TokenType.NATURAL,
|
||||
"NEXT": TokenType.NEXT,
|
||||
"NO ACTION": TokenType.NO_ACTION,
|
||||
"NOT": TokenType.NOT,
|
||||
"NOTNULL": TokenType.NOTNULL,
|
||||
"NULL": TokenType.NULL,
|
||||
"NULLS FIRST": TokenType.NULLS_FIRST,
|
||||
"NULLS LAST": TokenType.NULLS_LAST,
|
||||
|
@ -530,6 +539,7 @@ class Tokenizer(metaclass=_Tokenizer):
|
|||
"ROLLUP": TokenType.ROLLUP,
|
||||
"ROW": TokenType.ROW,
|
||||
"ROWS": TokenType.ROWS,
|
||||
"SCHEMA": TokenType.SCHEMA,
|
||||
"SEED": TokenType.SEED,
|
||||
"SELECT": TokenType.SELECT,
|
||||
"SEMI": TokenType.SEMI,
|
||||
|
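The three hunks above extend a keyword table inside class Tokenizer(metaclass=_Tokenizer) that maps uppercase SQL words, including multi-word phrases such as "NULLS FIRST" and "DISTRIBUTE BY", to TokenType members. The sketch below only illustrates how such a table can be consulted; the names KEYWORDS, classify, and the VAR fallback are assumptions, not code from this diff:

from enum import Enum, auto

class TokenType(Enum):
    # Tiny stand-in for the real enum; only the members used below.
    NULLS_FIRST = auto()
    DISTRIBUTE_BY = auto()
    SEED = auto()
    VAR = auto()

# Hypothetical keyword table in the spirit of the hunks above.
KEYWORDS = {
    "NULLS FIRST": TokenType.NULLS_FIRST,
    "DISTRIBUTE BY": TokenType.DISTRIBUTE_BY,
    "SEED": TokenType.SEED,
}

def classify(word: str) -> TokenType:
    # Fall back to a generic identifier-style token when the word is not a keyword.
    return KEYWORDS.get(word.upper(), TokenType.VAR)

print(classify("seed"))         # TokenType.SEED
print(classify("nulls first"))  # TokenType.NULLS_FIRST
print(classify("my_column"))    # TokenType.VAR

Multi-word keys imply that a scanner has to attempt phrase matches (for example, by peeking at the following word) before falling back to single-word lookup; the sketch sidesteps that by classifying an already-assembled phrase.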
@ -629,6 +639,7 @@ class Tokenizer(metaclass=_Tokenizer):
|
|||
TokenType.ANALYZE,
|
||||
TokenType.BEGIN,
|
||||
TokenType.CALL,
|
||||
TokenType.COMMENT_ON,
|
||||
TokenType.COMMIT,
|
||||
TokenType.EXPLAIN,
|
||||
TokenType.OPTIMIZE,
|
||||
|
|
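The final hunk adds TokenType.COMMENT_ON to what looks like a collection of statement-leading token types (alongside ANALYZE, BEGIN, CALL, COMMIT, EXPLAIN, OPTIMIZE). The collection's name and how it is consumed are not visible in this excerpt; the set name COMMANDS and the helper is_command below are hypothetical, shown only to illustrate the membership check such a set enables:

from enum import Enum, auto

class TokenType(Enum):
    ANALYZE = auto()
    BEGIN = auto()
    COMMENT_ON = auto()
    SELECT = auto()

# Hypothetical name: token types that introduce "command-style" statements.
COMMANDS = {TokenType.ANALYZE, TokenType.BEGIN, TokenType.COMMENT_ON}

def is_command(token_type: TokenType) -> bool:
    # Membership test a tokenizer or parser could use to branch into command handling.
    return token_type in COMMANDS

print(is_command(TokenType.COMMENT_ON))  # True
print(is_command(TokenType.SELECT))      # False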