
Merging upstream version 11.5.2.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 15:50:57 +01:00
parent b9525af810
commit 9782c88c58
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
106 changed files with 25262 additions and 24200 deletions

@@ -163,6 +163,7 @@ class TokenType(AutoName):
     CURRENT_ROW = auto()
     CURRENT_TIME = auto()
     CURRENT_TIMESTAMP = auto()
+    CURRENT_USER = auto()
     DEFAULT = auto()
     DELETE = auto()
     DESC = auto()
@@ -506,6 +507,7 @@ class Tokenizer(metaclass=_Tokenizer):
         "CURRENT ROW": TokenType.CURRENT_ROW,
         "CURRENT_TIME": TokenType.CURRENT_TIME,
         "CURRENT_TIMESTAMP": TokenType.CURRENT_TIMESTAMP,
+        "CURRENT_USER": TokenType.CURRENT_USER,
         "DATABASE": TokenType.DATABASE,
         "DEFAULT": TokenType.DEFAULT,
         "DELETE": TokenType.DELETE,
@@ -908,7 +910,7 @@ class Tokenizer(metaclass=_Tokenizer):
         if not word:
             if self._char in self.SINGLE_TOKENS:
-                self._add(self.SINGLE_TOKENS[self._char])  # type: ignore
+                self._add(self.SINGLE_TOKENS[self._char], text=self._char)  # type: ignore
                 return
             self._scan_var()
             return
@@ -921,7 +923,8 @@ class Tokenizer(metaclass=_Tokenizer):
                 return
             self._advance(size - 1)
-            self._add(self.KEYWORDS[word.upper()])
+            word = word.upper()
+            self._add(self.KEYWORDS[word], text=word)

     def _scan_comment(self, comment_start: str) -> bool:
         if comment_start not in self._COMMENTS:  # type: ignore
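
These two hunks pass an explicit text= when emitting single-character and keyword tokens, so a token's text is exactly the matched character or the canonical upper-cased keyword rather than whatever raw text the scanner happened to accumulate. A sketch of the observable difference, assuming the public Tokenizer API (query and output are illustrative):

# Illustrative only: keyword tokens now carry upper-cased text even when the
# input is lower case, and single-character tokens carry exactly the matched char.
from sqlglot.tokens import Tokenizer

tokens = Tokenizer().tokenize("select (1)")
print([(t.token_type.name, t.text) for t in tokens])
# With this change the first token's text is 'SELECT' rather than 'select',
# and the parenthesis tokens carry '(' and ')' as their text.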
@@ -946,7 +949,7 @@ class Tokenizer(metaclass=_Tokenizer):
         # Leading comment is attached to the succeeding token, whilst trailing comment to the preceding.
         # Multiple consecutive comments are preserved by appending them to the current comments list.
-        if comment_start_line == self._prev_token_line:
+        if comment_start_line == self._prev_token_line or self._end:
             self.tokens[-1].comments.extend(self._comments)
             self._comments = []
             self._prev_token_line = self._line
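
The last hunk additionally attaches pending comments to the preceding token when the scanner has reached the end of the input, so a comment that closes out the statement is not left stranded in the buffer waiting for a token that never arrives. A sketch of the behaviour, assuming the public Tokenizer API (input and output are illustrative):

# Illustrative only: a trailing comment at the very end of the input should now
# end up on the last emitted token's comments list.
from sqlglot.tokens import Tokenizer

tokens = Tokenizer().tokenize("SELECT 1\n-- trailing comment")
print(tokens[-1].token_type.name, tokens[-1].comments)
# Expected, roughly: NUMBER [' trailing comment']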