
Merging upstream version 17.12.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 20:55:29 +01:00
parent aa315e6009
commit aae08e0bb3
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
64 changed files with 12465 additions and 11885 deletions
sqlglot


@@ -110,6 +110,7 @@ class TokenType(AutoName):
     JSON = auto()
     JSONB = auto()
     TIME = auto()
+    TIMETZ = auto()
     TIMESTAMP = auto()
     TIMESTAMPTZ = auto()
     TIMESTAMPLTZ = auto()
@@ -151,6 +152,11 @@ class TokenType(AutoName):
     IPADDRESS = auto()
     IPPREFIX = auto()
     ENUM = auto()
+    ENUM8 = auto()
+    ENUM16 = auto()
+    FIXEDSTRING = auto()
+    LOWCARDINALITY = auto()
+    NESTED = auto()
 
     # keywords
     ALIAS = auto()
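These five tokens back ClickHouse-specific column types. As a hedged illustration that is not part of this diff, and assuming the ClickHouse dialect in this release wires the new tokens into its type parser, a definition using them should round-trip (table and column names are made up):

import sqlglot

# Illustrative sketch only; exact output formatting may vary between releases.
sql = "CREATE TABLE t (a LowCardinality(String), b FixedString(16))"
print(sqlglot.transpile(sql, read="clickhouse", write="clickhouse")[0])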
@@ -659,6 +665,7 @@ class Tokenizer(metaclass=_Tokenizer):
         "TINYINT": TokenType.TINYINT,
         "SHORT": TokenType.SMALLINT,
         "SMALLINT": TokenType.SMALLINT,
+        "INT128": TokenType.INT128,
         "INT2": TokenType.SMALLINT,
         "INTEGER": TokenType.INT,
         "INT": TokenType.INT,
@@ -699,6 +706,7 @@ class Tokenizer(metaclass=_Tokenizer):
         "BYTEA": TokenType.VARBINARY,
         "VARBINARY": TokenType.VARBINARY,
         "TIME": TokenType.TIME,
+        "TIMETZ": TokenType.TIMETZ,
         "TIMESTAMP": TokenType.TIMESTAMP,
         "TIMESTAMPTZ": TokenType.TIMESTAMPTZ,
         "TIMESTAMPLTZ": TokenType.TIMESTAMPLTZ,
@@ -879,6 +887,11 @@ class Tokenizer(metaclass=_Tokenizer):
 
     def _add(self, token_type: TokenType, text: t.Optional[str] = None) -> None:
         self._prev_token_line = self._line
+
+        if self._comments and token_type == TokenType.SEMICOLON and self.tokens:
+            self.tokens[-1].comments.extend(self._comments)
+            self._comments = []
+
         self.tokens.append(
             Token(
                 token_type,
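The effect of this hunk, sketched with a made-up statement: a comment that is still pending when the semicolon token is emitted is now attached to the token immediately before the semicolon instead of being carried forward to the next statement.

from sqlglot.tokens import Tokenizer, TokenType

tokens = Tokenizer().tokenize("SELECT 1 /* trailing note */ ;")
assert tokens[-1].token_type == TokenType.SEMICOLON
# With the change above, the pending comment lands on the literal before ';'.
assert tokens[-2].comments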