
Adding upstream version 25.6.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Author: Daniel Baumann <daniel@debian.org>
Date: 2025-02-13 21:42:48 +01:00
Parent: 4e506fbac7
Commit: f9dae8e951
Signed by: daniel (GPG key ID: FBB4F0E80A80222F)
69 changed files with 46817 additions and 45778 deletions

File: tests/dialects/test_bigquery.py

@@ -138,7 +138,6 @@ LANGUAGE js AS
self.validate_identity("SELECT CAST(CURRENT_DATE AS STRING FORMAT 'DAY') AS current_day")
self.validate_identity("SAFE_CAST(encrypted_value AS STRING FORMAT 'BASE64')")
self.validate_identity("CAST(encrypted_value AS STRING FORMAT 'BASE64')")
self.validate_identity("CAST(STRUCT<a INT64>(1) AS STRUCT<a INT64>)")
self.validate_identity("STRING_AGG(a)")
self.validate_identity("STRING_AGG(a, ' & ')")
self.validate_identity("STRING_AGG(DISTINCT a, ' & ')")
@@ -162,12 +161,9 @@ LANGUAGE js AS
self.validate_identity("x <> ''")
self.validate_identity("DATE_TRUNC(col, WEEK(MONDAY))")
self.validate_identity("SELECT b'abc'")
self.validate_identity("""SELECT * FROM UNNEST(ARRAY<STRUCT<x INT64>>[])""")
self.validate_identity("SELECT AS STRUCT 1 AS a, 2 AS b")
self.validate_identity("SELECT DISTINCT AS STRUCT 1 AS a, 2 AS b")
self.validate_identity("SELECT AS VALUE STRUCT(1 AS a, 2 AS b)")
self.validate_identity("SELECT STRUCT<ARRAY<STRING>>(['2023-01-17'])")
self.validate_identity("SELECT STRUCT<STRING>((SELECT a FROM b.c LIMIT 1)).*")
self.validate_identity("SELECT * FROM q UNPIVOT(values FOR quarter IN (b, c))")
self.validate_identity("""CREATE TABLE x (a STRUCT<values ARRAY<INT64>>)""")
self.validate_identity("""CREATE TABLE x (a STRUCT<b STRING OPTIONS (description='b')>)""")
@@ -1427,6 +1423,12 @@ WHERE
transpile("DATE_ADD(x, day)", read="bigquery")
def test_warnings(self):
with self.assertLogs(parser_logger) as cm:
self.validate_identity(
"/* some comment */ DECLARE foo DATE DEFAULT DATE_SUB(current_date, INTERVAL 2 day)"
)
self.assertIn("contains unsupported syntax", cm.output[0])
with self.assertLogs(helper_logger) as cm:
self.validate_identity(
"WITH cte(c) AS (SELECT * FROM t) SELECT * FROM cte",
@@ -1607,7 +1609,7 @@ WHERE
"SELECT * FROM GAP_FILL(TABLE device_data, ts_column => 'time', bucket_width => INTERVAL '1' MINUTE, value_columns => [('signal', 'null')], origin => CAST('2023-11-01 09:30:01' AS DATETIME)) ORDER BY time"
)
self.validate_identity(
"SELECT * FROM GAP_FILL(TABLE (SELECT * FROM UNNEST(ARRAY<STRUCT<device_id INT64, time DATETIME, signal INT64, state STRING>>[STRUCT(1, CAST('2023-11-01 09:34:01' AS DATETIME), 74, 'INACTIVE'), STRUCT(2, CAST('2023-11-01 09:36:00' AS DATETIME), 77, 'ACTIVE'), STRUCT(3, CAST('2023-11-01 09:37:00' AS DATETIME), 78, 'ACTIVE'), STRUCT(4, CAST('2023-11-01 09:38:01' AS DATETIME), 80, 'ACTIVE')])), ts_column => 'time', bucket_width => INTERVAL '1' MINUTE, value_columns => [('signal', 'linear')]) ORDER BY time"
"SELECT * FROM GAP_FILL(TABLE device_data, ts_column => 'time', bucket_width => INTERVAL '1' MINUTE, value_columns => [('signal', 'locf')]) ORDER BY time"
)
def test_models(self):
@@ -1757,3 +1759,49 @@ OPTIONS (
"MOD((a + 1), b)",
"MOD(a + 1, b)",
)
def test_inline_constructor(self):
self.validate_identity(
"""SELECT STRUCT<ARRAY<STRING>>(["2023-01-17"])""",
"""SELECT CAST(STRUCT(['2023-01-17']) AS STRUCT<ARRAY<STRING>>)""",
)
self.validate_identity(
"""SELECT STRUCT<STRING>((SELECT 'foo')).*""",
"""SELECT CAST(STRUCT((SELECT 'foo')) AS STRUCT<STRING>).*""",
)
self.validate_all(
"SELECT ARRAY<INT>[1, 2, 3]",
write={
"bigquery": "SELECT CAST([1, 2, 3] AS ARRAY<INT64>)",
"duckdb": "SELECT CAST([1, 2, 3] AS INT[])",
},
)
self.validate_all(
"CAST(STRUCT<a INT64>(1) AS STRUCT<a INT64>)",
write={
"bigquery": "CAST(CAST(STRUCT(1) AS STRUCT<a INT64>) AS STRUCT<a INT64>)",
"duckdb": "CAST(CAST(ROW(1) AS STRUCT(a BIGINT)) AS STRUCT(a BIGINT))",
},
)
self.validate_all(
"SELECT * FROM UNNEST(ARRAY<STRUCT<x INT64>>[])",
write={
"bigquery": "SELECT * FROM UNNEST(CAST([] AS ARRAY<STRUCT<x INT64>>))",
"duckdb": "SELECT * FROM UNNEST(CAST([] AS STRUCT(x BIGINT)[]))",
},
)
self.validate_all(
"SELECT * FROM UNNEST(ARRAY<STRUCT<device_id INT64, time DATETIME, signal INT64, state STRING>>[STRUCT(1, DATETIME '2023-11-01 09:34:01', 74, 'INACTIVE'),STRUCT(4, DATETIME '2023-11-01 09:38:01', 80, 'ACTIVE')])",
write={
"bigquery": "SELECT * FROM UNNEST(CAST([STRUCT(1, CAST('2023-11-01 09:34:01' AS DATETIME), 74, 'INACTIVE'), STRUCT(4, CAST('2023-11-01 09:38:01' AS DATETIME), 80, 'ACTIVE')] AS ARRAY<STRUCT<device_id INT64, time DATETIME, signal INT64, state STRING>>))",
"duckdb": "SELECT * FROM UNNEST(CAST([ROW(1, CAST('2023-11-01 09:34:01' AS TIMESTAMP), 74, 'INACTIVE'), ROW(4, CAST('2023-11-01 09:38:01' AS TIMESTAMP), 80, 'ACTIVE')] AS STRUCT(device_id BIGINT, time TIMESTAMP, signal BIGINT, state TEXT)[]))",
},
)
self.validate_all(
"SELECT STRUCT<a INT64, b STRUCT<c STRING>>(1, STRUCT('c_str'))",
write={
"bigquery": "SELECT CAST(STRUCT(1, STRUCT('c_str')) AS STRUCT<a INT64, b STRUCT<c STRING>>)",
"duckdb": "SELECT CAST(ROW(1, ROW('c_str')) AS STRUCT(a BIGINT, b STRUCT(c TEXT)))",
},
)
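
The test_inline_constructor cases above pin down how BigQuery's typed constructors are canonicalized into explicit casts. A minimal sketch of the same rewrite through the public API, assuming only that sqlglot is importable (the expected output is copied from the test above):

import sqlglot

# BigQuery's typed array constructor becomes an explicit CAST on the way out.
sql = "SELECT ARRAY<INT>[1, 2, 3]"
print(sqlglot.transpile(sql, read="bigquery", write="duckdb")[0])
# SELECT CAST([1, 2, 3] AS INT[])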

File: tests/dialects/test_clickhouse.py

@@ -83,6 +83,9 @@ class TestClickhouse(Validator):
self.validate_identity("TRUNCATE TABLE t1 ON CLUSTER test_cluster")
self.validate_identity("TRUNCATE DATABASE db")
self.validate_identity("TRUNCATE DATABASE db ON CLUSTER test_cluster")
self.validate_identity(
"SELECT number, COUNT() OVER (PARTITION BY number % 3) AS partition_count FROM numbers(10) WINDOW window_name AS (PARTITION BY number) QUALIFY partition_count = 4 ORDER BY number"
)
self.validate_identity(
"SELECT id, quantileGK(100, 0.95)(reading) OVER (PARTITION BY id ORDER BY id RANGE BETWEEN 30000 PRECEDING AND CURRENT ROW) AS window FROM table"
)
@@ -448,6 +451,10 @@ class TestClickhouse(Validator):
self.validate_identity("ALTER TABLE visits REPLACE PARTITION ID '201901' FROM visits_tmp")
self.validate_identity("ALTER TABLE visits ON CLUSTER test_cluster DROP COLUMN col1")
self.assertIsInstance(
parse_one("Tuple(select Int64)", into=exp.DataType, read="clickhouse"), exp.DataType
)
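
The assertIsInstance check above exercises parsing a ClickHouse type expression directly into an AST node. A minimal sketch using only the parse_one/exp API already used in this file:

from sqlglot import exp, parse_one

# "select" is accepted here as a named tuple field, not as a keyword.
dtype = parse_one("Tuple(select Int64)", into=exp.DataType, read="clickhouse")
assert isinstance(dtype, exp.DataType)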
def test_cte(self):
self.validate_identity("WITH 'x' AS foo SELECT foo")
self.validate_identity("WITH ['c'] AS field_names SELECT field_names")
@@ -545,6 +552,7 @@ class TestClickhouse(Validator):
self.validate_identity(
"CREATE TABLE foo (x UInt32) TTL time_column + INTERVAL '1' MONTH DELETE WHERE column = 'value'"
)
self.validate_identity("CREATE TABLE named_tuples (a Tuple(select String, i Int64))")
self.validate_all(
"""

File: tests/dialects/test_databricks.py

@@ -98,6 +98,22 @@ class TestDatabricks(Validator):
read="databricks",
)
self.validate_all(
"CREATE OR REPLACE FUNCTION func(a BIGINT, b BIGINT) RETURNS TABLE (a INT) RETURN SELECT a",
write={
"databricks": "CREATE OR REPLACE FUNCTION func(a BIGINT, b BIGINT) RETURNS TABLE (a INT) RETURN SELECT a",
"duckdb": "CREATE OR REPLACE FUNCTION func(a, b) AS TABLE SELECT a",
},
)
self.validate_all(
"CREATE OR REPLACE FUNCTION func(a BIGINT, b BIGINT) RETURNS BIGINT RETURN a",
write={
"databricks": "CREATE OR REPLACE FUNCTION func(a BIGINT, b BIGINT) RETURNS BIGINT RETURN a",
"duckdb": "CREATE OR REPLACE FUNCTION func(a, b) AS a",
},
)
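
The two validate_all cases above down-convert Databricks UDF DDL for DuckDB, whose macro-style functions take untyped parameters and carry no RETURNS clause. A minimal sketch, with the expected output copied from the test:

import sqlglot

sql = "CREATE OR REPLACE FUNCTION func(a BIGINT, b BIGINT) RETURNS BIGINT RETURN a"
print(sqlglot.transpile(sql, read="databricks", write="duckdb")[0])
# CREATE OR REPLACE FUNCTION func(a, b) AS a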
# https://docs.databricks.com/sql/language-manual/functions/colonsign.html
def test_json(self):
self.validate_identity("SELECT c1:price, c1:price.foo, c1:price.bar[1]")

File: tests/dialects/test_dialect.py

@@ -1164,6 +1164,13 @@ class TestDialect(Validator):
},
)
order_by_all_sql = "SELECT * FROM t ORDER BY ALL"
self.validate_identity(order_by_all_sql).find(exp.Ordered).this.assert_is(exp.Column)
for dialect in ("duckdb", "spark", "databricks"):
with self.subTest(f"Testing ORDER BY ALL in {dialect}"):
parse_one(order_by_all_sql, read=dialect).find(exp.Ordered).this.assert_is(exp.Var)
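
The loop above asserts that ALL parses as a plain column by default but as a variable in dialects where ORDER BY ALL is a dedicated construct. A minimal sketch of the distinction:

from sqlglot import exp, parse_one

sql = "SELECT * FROM t ORDER BY ALL"
# Default dialect: ALL is just a column reference named "all".
parse_one(sql).find(exp.Ordered).this.assert_is(exp.Column)
# DuckDB: ORDER BY ALL means "sort by every column", so ALL stays a Var.
parse_one(sql, read="duckdb").find(exp.Ordered).this.assert_is(exp.Var)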
def test_json(self):
self.validate_all(
"""JSON_EXTRACT(x, '$["a b"]')""",
@@ -2267,7 +2274,7 @@ SELECT
write={
"duckdb": "SELECT * FROM t QUALIFY COUNT(*) OVER () > 1",
"snowflake": "SELECT * FROM t QUALIFY COUNT(*) OVER () > 1",
"clickhouse": "SELECT * FROM (SELECT *, COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1",
"clickhouse": "SELECT * FROM t QUALIFY COUNT(*) OVER () > 1",
"mysql": "SELECT * FROM (SELECT *, COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1",
"oracle": "SELECT * FROM (SELECT *, COUNT(*) OVER () AS _w FROM t) _t WHERE _w > 1",
"postgres": "SELECT * FROM (SELECT *, COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1",
@@ -2279,7 +2286,7 @@ SELECT
write={
"duckdb": 'SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id" FROM t QUALIFY COUNT(*) OVER () > 1',
"snowflake": 'SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id" FROM t QUALIFY COUNT(*) OVER () > 1',
"clickhouse": 'SELECT "user id", some_id, other_id, "2 nd id" FROM (SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id", COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1',
"clickhouse": 'SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id" FROM t QUALIFY COUNT(*) OVER () > 1',
"mysql": "SELECT `user id`, some_id, other_id, `2 nd id` FROM (SELECT `user id`, some_id, 1 AS other_id, 2 AS `2 nd id`, COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1",
"oracle": 'SELECT "user id", some_id, other_id, "2 nd id" FROM (SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id", COUNT(*) OVER () AS _w FROM t) _t WHERE _w > 1',
"postgres": 'SELECT "user id", some_id, other_id, "2 nd id" FROM (SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id", COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1',
@@ -2566,3 +2573,33 @@ FROM subquery2""",
"""SELECT partition.d FROM t PARTITION (d)""",
"""SELECT partition.d FROM t AS PARTITION(d)""",
)
def test_string_functions(self):
for pad_func in ("LPAD", "RPAD"):
ch_alias = "LEFTPAD" if pad_func == "LPAD" else "RIGHTPAD"
for fill_pattern in ("", ", ' '"):
with self.subTest(f"Testing {pad_func}() with pattern {fill_pattern}"):
self.validate_all(
f"SELECT {pad_func}('bar', 5{fill_pattern})",
read={
"snowflake": f"SELECT {pad_func}('bar', 5{fill_pattern})",
"databricks": f"SELECT {pad_func}('bar', 5{fill_pattern})",
"spark": f"SELECT {pad_func}('bar', 5{fill_pattern})",
"postgres": f"SELECT {pad_func}('bar', 5{fill_pattern})",
"clickhouse": f"SELECT {ch_alias}('bar', 5{fill_pattern})",
},
write={
"": f"SELECT {pad_func}('bar', 5{fill_pattern})",
"spark": f"SELECT {pad_func}('bar', 5{fill_pattern})",
"postgres": f"SELECT {pad_func}('bar', 5{fill_pattern})",
"clickhouse": f"SELECT {pad_func}('bar', 5{fill_pattern})",
"snowflake": f"SELECT {pad_func}('bar', 5{fill_pattern})",
"databricks": f"SELECT {pad_func}('bar', 5{fill_pattern})",
"duckdb": f"SELECT {pad_func}('bar', 5, ' ')",
"mysql": f"SELECT {pad_func}('bar', 5, ' ')",
"hive": f"SELECT {pad_func}('bar', 5, ' ')",
"spark2": f"SELECT {pad_func}('bar', 5, ' ')",
"presto": f"SELECT {pad_func}('bar', 5, ' ')",
"trino": f"SELECT {pad_func}('bar', 5, ' ')",
},
)
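
Per the matrix above, dialects whose pad functions require an explicit fill string (DuckDB, MySQL, Hive, Presto, and friends) get a default ' ' injected, while ClickHouse additionally reads the LEFTPAD/RIGHTPAD aliases. A sketch composing one read/write pair from that matrix:

import sqlglot

# The ClickHouse alias normalizes to LPAD; DuckDB gets the default fill string.
print(sqlglot.transpile("SELECT LEFTPAD('bar', 5)", read="clickhouse", write="duckdb")[0])
# SELECT LPAD('bar', 5, ' ')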

File: tests/dialects/test_duckdb.py

@@ -361,6 +361,10 @@ class TestDuckDB(Validator):
self.validate_identity(
"SELECT * FROM (PIVOT Cities ON Year USING SUM(Population) GROUP BY Country) AS pivot_alias"
)
self.validate_identity(
# QUALIFY comes after WINDOW
"SELECT schema_name, function_name, ROW_NUMBER() OVER my_window AS function_rank FROM DUCKDB_FUNCTIONS() WINDOW my_window AS (PARTITION BY schema_name ORDER BY function_name) QUALIFY ROW_NUMBER() OVER my_window < 3"
)
self.validate_identity("DATE_SUB('YEAR', col, '2020-01-01')").assert_is(exp.Anonymous)
self.validate_identity("DATESUB('YEAR', col, '2020-01-01')").assert_is(exp.Anonymous)
@@ -1038,11 +1042,11 @@ class TestDuckDB(Validator):
)
self.validate_identity(
"CAST([[STRUCT_PACK(a := 1)]] AS STRUCT(a BIGINT)[][])",
"CAST([[{'a': 1}]] AS STRUCT(a BIGINT)[][])",
"CAST([[ROW(1)]] AS STRUCT(a BIGINT)[][])",
)
self.validate_identity(
"CAST([STRUCT_PACK(a := 1)] AS STRUCT(a BIGINT)[])",
"CAST([{'a': 1}] AS STRUCT(a BIGINT)[])",
"CAST([ROW(1)] AS STRUCT(a BIGINT)[])",
)
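
The two identities above fix the canonical DuckDB rendering: inside a CAST to a STRUCT type, STRUCT_PACK literals collapse into positional ROW(...) constructors. A minimal round-trip sketch, with the expected output copied from the test:

import sqlglot

sql = "CAST([STRUCT_PACK(a := 1)] AS STRUCT(a BIGINT)[])"
print(sqlglot.transpile(sql, read="duckdb", write="duckdb")[0])
# CAST([ROW(1)] AS STRUCT(a BIGINT)[])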
self.validate_all(

File: tests/dialects/test_postgres.py

@@ -977,6 +977,10 @@ class TestPostgres(Validator):
},
)
self.validate_identity("CREATE TABLE tbl (col INT UNIQUE NULLS NOT DISTINCT DEFAULT 9.99)")
self.validate_identity("CREATE TABLE tbl (col UUID UNIQUE DEFAULT GEN_RANDOM_UUID())")
self.validate_identity("CREATE TABLE tbl (col UUID, UNIQUE NULLS NOT DISTINCT (col))")
with self.assertRaises(ParseError):
transpile("CREATE TABLE products (price DECIMAL CHECK price > 0)", read="postgres")
with self.assertRaises(ParseError):
@@ -1130,3 +1134,12 @@ CROSS JOIN JSON_ARRAY_ELEMENTS(CAST(boxcrate AS JSON)) AS x(tbox)
CROSS JOIN JSON_ARRAY_ELEMENTS(CAST(JSON_EXTRACT_PATH(tbox, 'boxes') AS JSON)) AS y(boxes)"""
self.validate_all(expected_postgres, read={"trino": trino_input}, pretty=True)
def test_rows_from(self):
self.validate_identity("""SELECT * FROM ROWS FROM (FUNC1(col1, col2))""")
self.validate_identity(
"""SELECT * FROM ROWS FROM (FUNC1(col1) AS alias1("col1" TEXT), FUNC2(col2) AS alias2("col2" INT)) WITH ORDINALITY"""
)
self.validate_identity(
"""SELECT * FROM table1, ROWS FROM (FUNC1(col1) AS alias1("col1" TEXT)) WITH ORDINALITY AS alias3("col3" INT, "col4" TEXT)"""
)
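
test_rows_from above only needs round-trip stability, which validate_identity checks by re-rendering the parsed SQL. The same check spelled out through the API, as a sketch:

import sqlglot

sql = "SELECT * FROM ROWS FROM (FUNC1(col1, col2))"
assert sqlglot.transpile(sql, read="postgres", write="postgres")[0] == sql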

File: tests/dialects/test_presto.py

@@ -405,6 +405,14 @@ class TestPresto(Validator):
)
self.validate_identity("DATE_ADD('DAY', 1, y)")
self.validate_all(
"SELECT DATE_ADD('MINUTE', 30, col)",
write={
"presto": "SELECT DATE_ADD('MINUTE', 30, col)",
"trino": "SELECT DATE_ADD('MINUTE', 30, col)",
},
)
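
The new case above confirms that DATE_ADD('MINUTE', 30, col) survives the Presto/Trino pair unchanged, since both quote the unit. As a sketch:

import sqlglot

print(sqlglot.transpile("SELECT DATE_ADD('MINUTE', 30, col)", read="presto", write="trino")[0])
# SELECT DATE_ADD('MINUTE', 30, col)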
def test_ddl(self):
self.validate_all(
"CREATE TABLE test WITH (FORMAT = 'PARQUET') AS SELECT 1",

File: tests/dialects/test_spark.py

@@ -684,6 +684,8 @@ TBLPROPERTIES (
write={
"spark": "SELECT DATE_ADD(MONTH, 20, col)",
"databricks": "SELECT DATE_ADD(MONTH, 20, col)",
"presto": "SELECT DATE_ADD('MONTH', 20, col)",
"trino": "SELECT DATE_ADD('MONTH', 20, col)",
},
)
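
The added presto and trino targets above quote the unit, because those dialects take it as a string literal where Spark accepts a bare keyword. A sketch of that conversion:

import sqlglot

print(sqlglot.transpile("SELECT DATE_ADD(MONTH, 20, col)", read="spark", write="presto")[0])
# SELECT DATE_ADD('MONTH', 20, col)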