
Adding upstream version 11.7.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 15:51:35 +01:00
parent b4e0e3422e
commit 82a8846a46
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
144 changed files with 44104 additions and 39367 deletions

tests/test_tokens.py

@@ -14,6 +14,7 @@ class TestTokens(unittest.TestCase):
("foo", []),
("foo /*comment 1*/ /*comment 2*/", ["comment 1", "comment 2"]),
("foo\n-- comment", [" comment"]),
("1 /*/2 */", ["/2 "]),
]
for sql, comment in sql_comment:
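For context (not part of the diff): a minimal sketch of how the comment cases above are exercised, assuming the test loop compares each expected list against the first token's comments attribute of sqlglot's Tokenizer.

from sqlglot.tokens import Tokenizer

# The case added in this hunk: a block comment whose body itself starts with "/".
tokens = Tokenizer().tokenize("1 /*/2 */")
# Expected to match the list given above, i.e. ["/2 "].
print(tokens[0].comments)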
@@ -22,14 +23,17 @@ class TestTokens(unittest.TestCase):
def test_token_line(self):
tokens = Tokenizer().tokenize(
"""SELECT /*
line break
*/
'x
y',
x"""
line break
*/
'x
y',
x"""
)
self.assertEqual(tokens[1].line, 5)
self.assertEqual(tokens[1].col, 3)
self.assertEqual(tokens[-1].line, 6)
self.assertEqual(tokens[-1].col, 1)
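A small illustrative sketch (an assumption drawn from the assertions above, not part of the diff): tokens expose line and col attributes, which the updated expectations pin down for input containing line breaks inside a comment and a string literal.

from sqlglot.tokens import Tokenizer

tokens = Tokenizer().tokenize("SELECT\n  x")
# Per the assertions above, each token records the line and column where it
# appears; the identifier x here should report line 2.
last = tokens[-1]
print(last.line, last.col)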
def test_command(self):
tokens = Tokenizer().tokenize("SHOW;")
@@ -46,6 +50,10 @@ class TestTokens(unittest.TestCase):
self.assertEqual(tokens[2].token_type, TokenType.SHOW)
self.assertEqual(tokens[3].token_type, TokenType.SEMICOLON)
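For reference (not part of the diff), a hedged sketch of the token_type checks used in test_command, based only on the names appearing above:

from sqlglot.tokens import Tokenizer, TokenType

tokens = Tokenizer().tokenize("SHOW;")
# Each token carries a token_type; for this input the first token should be
# TokenType.SHOW and the trailing semicolon TokenType.SEMICOLON.
print(tokens[0].token_type, tokens[-1].token_type)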
def test_error_msg(self):
with self.assertRaisesRegex(ValueError, "Error tokenizing 'select.*"):
Tokenizer().tokenize("select /*")
def test_jinja(self):
tokenizer = Tokenizer()