Merging upstream version 26.16.4.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent c0dccc98e9
commit 542c175872
57 changed files with 54950 additions and 53265 deletions
@@ -2432,3 +2432,56 @@ OPTIONS (
                f"SELECT * FROM t1, UNNEST([1, 2]) AS hit WITH OFFSET {join_ops} JOIN foo",
                f"SELECT * FROM t1, UNNEST([1, 2]) AS hit WITH OFFSET AS offset {join_ops} JOIN foo",
            )

    def test_identifier_meta(self):
        ast = parse_one(
            "SELECT a, b FROM test_schema.test_table_a UNION ALL SELECT c, d FROM test_catalog.test_schema.test_table_b",
            dialect="bigquery",
        )
        for identifier in ast.find_all(exp.Identifier):
            self.assertEqual(set(identifier.meta), {"line", "col", "start", "end"})

        self.assertEqual(
            ast.this.args["from"].this.args["this"].meta,
            {"line": 1, "col": 41, "start": 29, "end": 40},
        )
        self.assertEqual(
            ast.this.args["from"].this.args["db"].meta,
            {"line": 1, "col": 28, "start": 17, "end": 27},
        )
        self.assertEqual(
            ast.expression.args["from"].this.args["this"].meta,
            {"line": 1, "col": 106, "start": 94, "end": 105},
        )
        self.assertEqual(
            ast.expression.args["from"].this.args["db"].meta,
            {"line": 1, "col": 93, "start": 82, "end": 92},
        )
        self.assertEqual(
            ast.expression.args["from"].this.args["catalog"].meta,
            {"line": 1, "col": 81, "start": 69, "end": 80},
        )

        information_schema_sql = "SELECT a, b FROM region.INFORMATION_SCHEMA.COLUMNS"
        ast = parse_one(information_schema_sql, dialect="bigquery")
        meta = ast.args["from"].this.this.meta
        self.assertEqual(meta, {"line": 1, "col": 50, "start": 24, "end": 49})
        assert (
            information_schema_sql[meta["start"] : meta["end"] + 1] == "INFORMATION_SCHEMA.COLUMNS"
        )

    def test_quoted_identifier_meta(self):
        sql = "SELECT `a` FROM `test_schema`.`test_table_a`"
        ast = parse_one(sql, dialect="bigquery")
        db_meta = ast.args["from"].this.args["db"].meta
        self.assertEqual(sql[db_meta["start"] : db_meta["end"] + 1], "`test_schema`")
        table_meta = ast.args["from"].this.this.meta
        self.assertEqual(sql[table_meta["start"] : table_meta["end"] + 1], "`test_table_a`")

        information_schema_sql = "SELECT a, b FROM `region.INFORMATION_SCHEMA.COLUMNS`"
        ast = parse_one(information_schema_sql, dialect="bigquery")
        table_meta = ast.args["from"].this.this.meta
        assert (
            information_schema_sql[table_meta["start"] : table_meta["end"] + 1]
            == "`region.INFORMATION_SCHEMA.COLUMNS`"
        )
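For context, an illustrative sketch (not part of the diff) of the behaviour these new meta tests pin down: after parsing, each exp.Identifier carries a meta dict with inclusive "start"/"end" offsets into the source SQL, so the original text of any identifier can be sliced back out.

from sqlglot import exp, parse_one

sql = "SELECT a, b FROM test_schema.test_table_a"
ast = parse_one(sql, dialect="bigquery")
for ident in ast.find_all(exp.Identifier):
    # start/end are inclusive offsets into `sql`, hence the +1 on the slice
    print(ident.name, sql[ident.meta["start"] : ident.meta["end"] + 1])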
@@ -35,6 +35,7 @@ class TestDatabricks(Validator):
        self.validate_identity("SELECT ${x} FROM ${y} WHERE ${z} > 1")
        self.validate_identity("CREATE TABLE foo (x DATE GENERATED ALWAYS AS (CAST(y AS DATE)))")
        self.validate_identity("TRUNCATE TABLE t1 PARTITION(age = 10, name = 'test', address)")
        self.validate_identity("SELECT PARSE_JSON('{}')")
        self.validate_identity(
            "CREATE TABLE IF NOT EXISTS db.table (a TIMESTAMP, b BOOLEAN GENERATED ALWAYS AS (NOT a IS NULL)) USING DELTA"
        )

@@ -56,7 +57,10 @@ class TestDatabricks(Validator):
        self.validate_identity(
            "COPY INTO target FROM `s3://link` FILEFORMAT = AVRO VALIDATE = ALL FILES = ('file1', 'file2') FORMAT_OPTIONS ('opt1'='true', 'opt2'='test') COPY_OPTIONS ('mergeSchema'='true')"
        )
        self.validate_identity("SELECT PARSE_JSON('{}')")
        self.validate_identity(
            "SELECT TIMESTAMP '2025-04-29 18.47.18'::DATE",
            "SELECT CAST(CAST('2025-04-29 18.47.18' AS DATE) AS TIMESTAMP)",
        )
        self.validate_identity(
            "SELECT DATE_FORMAT(CAST(FROM_UTC_TIMESTAMP(foo, 'America/Los_Angeles') AS TIMESTAMP), 'yyyy-MM-dd HH:mm:ss') AS foo FROM t",
            "SELECT DATE_FORMAT(CAST(FROM_UTC_TIMESTAMP(CAST(foo AS TIMESTAMP), 'America/Los_Angeles') AS TIMESTAMP), 'yyyy-MM-dd HH:mm:ss') AS foo FROM t",
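An illustrative sketch (not part of the diff) of what the two-argument validate_identity calls above assert: the first SQL string must parse and re-generate as the second. The same check can be reproduced with sqlglot.transpile.

import sqlglot

out = sqlglot.transpile(
    "SELECT TIMESTAMP '2025-04-29 18.47.18'::DATE",
    read="databricks",
    write="databricks",
)[0]
assert out == "SELECT CAST(CAST('2025-04-29 18.47.18' AS DATE) AS TIMESTAMP)"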
@@ -161,6 +165,8 @@ class TestDatabricks(Validator):
    # https://docs.databricks.com/sql/language-manual/functions/colonsign.html
    def test_json(self):
        self.validate_identity("SELECT c1:price, c1:price.foo, c1:price.bar[1]")
        self.validate_identity("SELECT TRY_CAST(c1:price AS ARRAY<VARIANT>)")
        self.validate_identity("""SELECT TRY_CAST(c1:["foo bar"]["baz qux"] AS ARRAY<VARIANT>)""")
        self.validate_identity(
            """SELECT c1:item[1].price FROM VALUES ('{ "item": [ { "model" : "basic", "price" : 6.12 }, { "model" : "medium", "price" : 9.24 } ] }') AS T(c1)"""
        )
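The single-argument calls above are round-trip checks: Databricks' colon operator for JSON paths must survive a parse/generate cycle unchanged. A minimal sketch (not part of the diff):

import sqlglot

sql = "SELECT TRY_CAST(c1:price AS ARRAY<VARIANT>)"
assert sqlglot.transpile(sql, read="databricks", write="databricks")[0] == sql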
@@ -298,6 +298,7 @@ class TestDuckDB(Validator):
        self.validate_identity("SUMMARIZE tbl").assert_is(exp.Summarize)
        self.validate_identity("SUMMARIZE SELECT * FROM tbl").assert_is(exp.Summarize)
        self.validate_identity("CREATE TABLE tbl_summary AS SELECT * FROM (SUMMARIZE tbl)")
        self.validate_identity("SELECT STAR(tbl, exclude := [foo])")
        self.validate_identity("UNION_VALUE(k1 := 1)").find(exp.PropertyEQ).this.assert_is(
            exp.Identifier
        )
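Per the assert_is checks above, DuckDB's SUMMARIZE statement parses into a dedicated exp.Summarize node. A minimal sketch (not part of the diff):

from sqlglot import exp, parse_one

assert isinstance(parse_one("SUMMARIZE tbl", read="duckdb"), exp.Summarize)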
@@ -31,6 +31,8 @@ class TestPostgres(Validator):
        self.assertIsInstance(expr, exp.Alter)
        self.assertEqual(expr.sql(dialect="postgres"), alter_table_only)

        self.validate_identity("SELECT EXTRACT(QUARTER FROM CAST('2025-04-26' AS DATE))")
        self.validate_identity("SELECT DATE_TRUNC('QUARTER', CAST('2025-04-26' AS DATE))")
        self.validate_identity("STRING_TO_ARRAY('xx~^~yy~^~zz', '~^~', 'yy')")
        self.validate_identity("SELECT x FROM t WHERE CAST($1 AS TEXT) = 'ok'")
        self.validate_identity("SELECT * FROM t TABLESAMPLE SYSTEM (50) REPEATABLE (55)")

@@ -372,6 +374,14 @@ FROM json_data, field_ids""",
            pretty=True,
        )

        self.validate_all(
            "SELECT CURRENT_TIMESTAMP + INTERVAL '-3 MONTH'",
            read={
                "mysql": "SELECT DATE_ADD(CURRENT_TIMESTAMP, INTERVAL -1 QUARTER)",
                "postgres": "SELECT CURRENT_TIMESTAMP + INTERVAL '-3 MONTH'",
                "tsql": "SELECT DATEADD(QUARTER, -1, GETDATE())",
            },
        )
        self.validate_all(
            "SELECT ARRAY[]::INT[] AS foo",
            write={
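The validate_all read mapping above asserts that each listed dialect's SQL transpiles into the Postgres target, with -1 QUARTER normalized to INTERVAL '-3 MONTH'. A sketch (not part of the diff):

import sqlglot

out = sqlglot.transpile(
    "SELECT DATEADD(QUARTER, -1, GETDATE())", read="tsql", write="postgres"
)[0]
assert out == "SELECT CURRENT_TIMESTAMP + INTERVAL '-3 MONTH'"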
@@ -11,6 +11,7 @@ class TestSnowflake(Validator):
    dialect = "snowflake"

    def test_snowflake(self):
        self.validate_identity("SELECT GET(a, b)")
        self.assertEqual(
            # Ensures we don't fail when generating ParseJSON with the `safe` arg set to `True`
            self.validate_identity("""SELECT TRY_PARSE_JSON('{"x: 1}')""").sql(),

@@ -22,7 +23,7 @@ class TestSnowflake(Validator):
        self.assertEqual(expr.sql(dialect="snowflake"), "SELECT APPROX_TOP_K(C4, 3, 5) FROM t")

        self.validate_identity("INSERT INTO test VALUES (x'48FAF43B0AFCEF9B63EE3A93EE2AC2')")
        self.validate_identity("exclude := [foo]")
        self.validate_identity("SELECT STAR(tbl, exclude := [foo])")
        self.validate_identity("SELECT CAST([1, 2, 3] AS VECTOR(FLOAT, 3))")
        self.validate_identity("SELECT CONNECT_BY_ROOT test AS test_column_alias")
        self.validate_identity("SELECT number").selects[0].assert_is(exp.Column)

@@ -110,6 +111,10 @@ class TestSnowflake(Validator):
            "SELECT 1 put",
            "SELECT 1 AS put",
        )
        self.validate_identity(
            "SELECT 1 get",
            "SELECT 1 AS get",
        )
        self.validate_identity(
            "WITH t (SELECT 1 AS c) SELECT c FROM t",
            "WITH t AS (SELECT 1 AS c) SELECT c FROM t",

@@ -2465,6 +2470,33 @@ SINGLE = TRUE""",
            check_command_warning=True,
        )

    def test_get_from_stage(self):
        self.validate_identity('GET @"my_DB"."schEMA1"."MYstage" \'file:///dir/tmp.csv\'')
        self.validate_identity("GET @s1/test 'file:///dir/tmp.csv'")

        # GET with file path and stage ref containing spaces (wrapped in single quotes)
        ast = parse_one("GET '@s1/my folder' 'file://my file.txt'", read="snowflake")
        self.assertIsInstance(ast, exp.Get)
        self.assertEqual(ast.args["target"], exp.Var(this="'@s1/my folder'"))
        self.assertEqual(ast.this, exp.Literal(this="file://my file.txt", is_string=True))
        self.assertEqual(ast.sql("snowflake"), "GET '@s1/my folder' 'file://my file.txt'")

        # expression with additional properties
        ast = parse_one("GET @stage1/folder 'file:///tmp/my.txt' PARALLEL = 1", read="snowflake")
        self.assertIsInstance(ast, exp.Get)
        self.assertEqual(ast.args["target"], exp.Var(this="@stage1/folder"))
        self.assertEqual(ast.this, exp.Literal(this="file:///tmp/my.txt", is_string=True))
        properties = ast.args.get("properties")
        props_dict = {prop.this.this: prop.args["value"].this for prop in properties.expressions}
        self.assertEqual(props_dict, {"PARALLEL": "1"})

        # the unquoted URI variant is not fully supported yet
        self.validate_identity("GET @%table file:///dir/tmp.csv", check_command_warning=True)
        self.validate_identity(
            "GET @s1/test file:///dir/tmp.csv PARALLEL=1",
            check_command_warning=True,
        )

    def test_querying_semi_structured_data(self):
        self.validate_identity("SELECT $1")
        self.validate_identity("SELECT $1.elem")
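A sketch (not part of the diff) of the parse shape the test_get_from_stage cases assert: a Snowflake GET becomes an exp.Get whose "target" arg is a Var holding the stage reference and whose this is the file URI as a string literal. The SQL below is taken from the identity test above; inspecting its parts this way is an assumption based on the asserts shown.

from sqlglot import exp, parse_one

ast = parse_one("GET @s1/test 'file:///dir/tmp.csv'", read="snowflake")
assert isinstance(ast, exp.Get)
print(ast.args["target"].this)  # @s1/test
print(ast.this.this)            # file:///dir/tmp.csv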
@@ -2578,3 +2610,11 @@ SINGLE = TRUE""",
        self.assertEqual(expr.find(exp.Placeholder), exp.Placeholder(this="1"))
        self.validate_identity("SELECT :1, :2")
        self.validate_identity("SELECT :1 + :2")

    def test_max_by_min_by(self):
        max_by = self.validate_identity("MAX_BY(DISTINCT selected_col, filtered_col)")
        min_by = self.validate_identity("MIN_BY(DISTINCT selected_col, filtered_col)")

        for node in (max_by, min_by):
            self.assertEqual(len(node.this.expressions), 1)
            self.assertIsInstance(node.expression, exp.Column)
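Per the assertions above, MAX_BY(DISTINCT x, y) parses with the DISTINCT-wrapped column under this (a single expression) and the ordering column under expression. A minimal sketch (not part of the diff):

from sqlglot import parse_one

node = parse_one("MAX_BY(DISTINCT selected_col, filtered_col)", read="snowflake")
print(len(node.this.expressions))  # 1, the DISTINCT-wrapped selected_col
print(type(node.expression))       # a Column node for filtered_col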