1
0
Fork 0

Merging upstream version 23.10.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 21:31:23 +01:00
parent 6cbc5d6f97
commit 49aa147013
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
91 changed files with 52881 additions and 50396 deletions

View file

@@ -810,3 +810,6 @@ LIFETIME(MIN 0 MAX 0)""",
},
pretty=True,
)
self.validate_identity(
"CREATE TABLE t1 (a String EPHEMERAL, b String EPHEMERAL func(), c String MATERIALIZED func(), d String ALIAS func()) ENGINE=TinyLog()"
)

View file

@@ -7,6 +7,8 @@ class TestDatabricks(Validator):
dialect = "databricks"
def test_databricks(self):
self.validate_identity("DESCRIBE HISTORY a.b")
self.validate_identity("DESCRIBE history.tbl")
self.validate_identity("CREATE TABLE t (c STRUCT<interval: DOUBLE COMMENT 'aaa'>)")
self.validate_identity("CREATE TABLE my_table TBLPROPERTIES (a.b=15)")
self.validate_identity("CREATE TABLE my_table TBLPROPERTIES ('a.b'=15)")
@@ -23,6 +25,9 @@ class TestDatabricks(Validator):
self.validate_identity("CREATE FUNCTION a AS b")
self.validate_identity("SELECT ${x} FROM ${y} WHERE ${z} > 1")
self.validate_identity("CREATE TABLE foo (x DATE GENERATED ALWAYS AS (CAST(y AS DATE)))")
self.validate_identity(
"SELECT DATE_FORMAT(CAST(FROM_UTC_TIMESTAMP(CAST(foo AS TIMESTAMP), 'America/Los_Angeles') AS TIMESTAMP), 'yyyy-MM-dd HH:mm:ss') AS foo FROM t"
)
self.validate_identity(
"SELECT * FROM sales UNPIVOT INCLUDE NULLS (sales FOR quarter IN (q1 AS `Jan-Mar`))"
)

View file

@@ -1332,6 +1332,15 @@ class TestDialect(Validator):
)
def test_set_operators(self):
self.validate_all(
"SELECT * FROM a UNION SELECT * FROM b ORDER BY x LIMIT 1",
write={
"": "SELECT * FROM a UNION SELECT * FROM b ORDER BY x LIMIT 1",
"clickhouse": "SELECT * FROM (SELECT * FROM a UNION DISTINCT SELECT * FROM b) AS _l_0 ORDER BY x NULLS FIRST LIMIT 1",
"tsql": "SELECT TOP 1 * FROM (SELECT * FROM a UNION SELECT * FROM b) AS _l_0 ORDER BY x",
},
)
self.validate_all(
"SELECT * FROM a UNION SELECT * FROM b",
read={
@@ -1667,7 +1676,7 @@ class TestDialect(Validator):
"presto": "CAST(a AS DOUBLE) / b",
"redshift": "CAST(a AS DOUBLE PRECISION) / b",
"sqlite": "CAST(a AS REAL) / b",
"teradata": "CAST(a AS DOUBLE) / b",
"teradata": "CAST(a AS DOUBLE PRECISION) / b",
"trino": "CAST(a AS DOUBLE) / b",
"tsql": "CAST(a AS FLOAT) / b",
},

View file

@@ -240,6 +240,7 @@ class TestDuckDB(Validator):
self.validate_identity("SELECT MAP(['key1', 'key2', 'key3'], [10, 20, 30])")
self.validate_identity("SELECT MAP {'x': 1}")
self.validate_identity("SELECT (MAP {'x': 1})['x']")
self.validate_identity("SELECT df1.*, df2.* FROM df1 POSITIONAL JOIN df2")
self.validate_identity("MAKE_TIMESTAMP(1992, 9, 20, 13, 34, 27.123456)")
self.validate_identity("MAKE_TIMESTAMP(1667810584123456)")

View file

@@ -1103,7 +1103,7 @@ COMMENT='客户账户表'"""
"presto": "CAST(a AS DOUBLE) / NULLIF(b, 0)",
"redshift": "CAST(a AS DOUBLE PRECISION) / NULLIF(b, 0)",
"sqlite": "CAST(a AS REAL) / b",
"teradata": "CAST(a AS DOUBLE) / NULLIF(b, 0)",
"teradata": "CAST(a AS DOUBLE PRECISION) / NULLIF(b, 0)",
"trino": "CAST(a AS DOUBLE) / NULLIF(b, 0)",
"tsql": "CAST(a AS FLOAT) / NULLIF(b, 0)",
},

View file

@@ -50,6 +50,7 @@ class TestPostgres(Validator):
self.validate_identity("STRING_AGG(DISTINCT x, ',' ORDER BY y DESC)")
self.validate_identity("SELECT CASE WHEN SUBSTRING('abcdefg') IN ('ab') THEN 1 ELSE 0 END")
self.validate_identity("COMMENT ON TABLE mytable IS 'this'")
self.validate_identity("COMMENT ON MATERIALIZED VIEW my_view IS 'this'")
self.validate_identity("SELECT e'\\xDEADBEEF'")
self.validate_identity("SELECT CAST(e'\\176' AS BYTEA)")
self.validate_identity("SELECT * FROM x WHERE SUBSTRING('Thomas' FROM '...$') IN ('mas')")
@@ -466,6 +467,12 @@ class TestPostgres(Validator):
"tsql": "SELECT (CAST('2016-01-10' AS DATE), CAST('2016-02-01' AS DATE)) OVERLAPS (CAST('2016-01-20' AS DATE), CAST('2016-02-10' AS DATE))",
},
)
self.validate_all(
"SELECT DATE_PART('epoch', CAST('2023-01-04 04:05:06.789' AS TIMESTAMP))",
read={
"": "SELECT TIME_TO_UNIX(TIMESTAMP '2023-01-04 04:05:06.789')",
},
)
self.validate_all(
"x ^ y",
write={

View file

@@ -7,6 +7,7 @@ class TestPresto(Validator):
dialect = "presto"
def test_cast(self):
self.validate_identity("SELECT * FROM x qualify", "SELECT * FROM x AS qualify")
self.validate_identity("CAST(x AS IPADDRESS)")
self.validate_identity("CAST(x AS IPPREFIX)")
@@ -611,6 +612,15 @@ class TestPresto(Validator):
self.validate_identity(
"SELECT * FROM example.testdb.customer_orders FOR TIMESTAMP AS OF CAST('2022-03-23 09:59:29.803 Europe/Vienna' AS TIMESTAMP)"
)
self.validate_identity(
"SELECT origin_state, destination_state, origin_zip, SUM(package_weight) FROM shipping GROUP BY ALL CUBE (origin_state, destination_state), ROLLUP (origin_state, origin_zip)"
)
self.validate_identity(
"SELECT origin_state, destination_state, origin_zip, SUM(package_weight) FROM shipping GROUP BY DISTINCT CUBE (origin_state, destination_state), ROLLUP (origin_state, origin_zip)"
)
self.validate_identity(
"SELECT JSON_EXTRACT_SCALAR(CAST(extra AS JSON), '$.value_b'), COUNT(*) FROM table_a GROUP BY DISTINCT (JSON_EXTRACT_SCALAR(CAST(extra AS JSON), '$.value_b'))"
)
self.validate_all(
"SELECT LAST_DAY_OF_MONTH(CAST('2008-11-25' AS DATE))",

View file

@@ -5,13 +5,56 @@ class TestPRQL(Validator):
dialect = "prql"
def test_prql(self):
self.validate_identity("FROM x", "SELECT * FROM x")
self.validate_identity("FROM x DERIVE a + 1", "SELECT *, a + 1 FROM x")
self.validate_identity("FROM x DERIVE x = a + 1", "SELECT *, a + 1 AS x FROM x")
self.validate_identity("FROM x DERIVE {a + 1}", "SELECT *, a + 1 FROM x")
self.validate_identity("FROM x DERIVE {x = a + 1, b}", "SELECT *, a + 1 AS x, b FROM x")
self.validate_identity("FROM x TAKE 10", "SELECT * FROM x LIMIT 10")
self.validate_identity("FROM x TAKE 10 TAKE 5", "SELECT * FROM x LIMIT 5")
self.validate_identity("from x", "SELECT * FROM x")
self.validate_identity("from x derive a + 1", "SELECT *, a + 1 FROM x")
self.validate_identity("from x derive x = a + 1", "SELECT *, a + 1 AS x FROM x")
self.validate_identity("from x derive {a + 1}", "SELECT *, a + 1 FROM x")
self.validate_identity("from x derive {x = a + 1, b}", "SELECT *, a + 1 AS x, b FROM x")
self.validate_identity(
"FROM x DERIVE {x = a + 1, b} SELECT {y = x, 2}", "SELECT a + 1 AS y, 2 FROM x"
"from x derive {x = a + 1, b} select {y = x, 2}", "SELECT a + 1 AS y, 2 FROM x"
)
self.validate_identity("from x take 10", "SELECT * FROM x LIMIT 10")
self.validate_identity("from x take 10 take 5", "SELECT * FROM x LIMIT 5")
self.validate_identity("from x filter age > 25", "SELECT * FROM x WHERE age > 25")
self.validate_identity(
"from x derive {x = a + 1, b} filter age > 25",
"SELECT *, a + 1 AS x, b FROM x WHERE age > 25",
)
self.validate_identity("from x filter dept != 'IT'", "SELECT * FROM x WHERE dept <> 'IT'")
self.validate_identity(
"from x filter p == 'product' select { a, b }", "SELECT a, b FROM x WHERE p = 'product'"
)
self.validate_identity(
"from x filter age > 25 filter age < 27", "SELECT * FROM x WHERE age > 25 AND age < 27"
)
self.validate_identity(
"from x filter (age > 25 && age < 27)", "SELECT * FROM x WHERE (age > 25 AND age < 27)"
)
self.validate_identity(
"from x filter (age > 25 || age < 27)", "SELECT * FROM x WHERE (age > 25 OR age < 27)"
)
self.validate_identity(
"from x filter (age > 25 || age < 22) filter age > 26 filter age < 27",
"SELECT * FROM x WHERE ((age > 25 OR age < 22) AND age > 26) AND age < 27",
)
self.validate_identity(
"from x sort age",
"SELECT * FROM x ORDER BY age",
)
self.validate_identity(
"from x sort {-age}",
"SELECT * FROM x ORDER BY age DESC",
)
self.validate_identity(
"from x sort {age, name}",
"SELECT * FROM x ORDER BY age, name",
)
self.validate_identity(
"from x sort {-age, +name}",
"SELECT * FROM x ORDER BY age DESC, name",
)
self.validate_identity("from x append y", "SELECT * FROM x UNION ALL SELECT * FROM y")
self.validate_identity("from x remove y", "SELECT * FROM x EXCEPT ALL SELECT * FROM y")
self.validate_identity(
"from x intersect y", "SELECT * FROM x INTERSECT ALL SELECT * FROM y"
)

View file

@@ -66,6 +66,7 @@ WHERE
self.validate_identity("SELECT DAYOFYEAR(CURRENT_TIMESTAMP())")
self.validate_identity("LISTAGG(data['some_field'], ',')")
self.validate_identity("WEEKOFYEAR(tstamp)")
self.validate_identity("SELECT QUARTER(CURRENT_TIMESTAMP())")
self.validate_identity("SELECT SUM(amount) FROM mytable GROUP BY ALL")
self.validate_identity("WITH x AS (SELECT 1 AS foo) SELECT foo FROM IDENTIFIER('x')")
self.validate_identity("WITH x AS (SELECT 1 AS foo) SELECT IDENTIFIER('foo') FROM x")
@@ -1575,22 +1576,26 @@ FROM persons AS p, LATERAL FLATTEN(input => p.c, path => 'contact') AS _flattene
)
def test_match_recognize(self):
for row in (
"ONE ROW PER MATCH",
"ALL ROWS PER MATCH",
"ALL ROWS PER MATCH SHOW EMPTY MATCHES",
"ALL ROWS PER MATCH OMIT EMPTY MATCHES",
"ALL ROWS PER MATCH WITH UNMATCHED ROWS",
):
for after in (
"AFTER MATCH SKIP",
"AFTER MATCH SKIP PAST LAST ROW",
"AFTER MATCH SKIP TO NEXT ROW",
"AFTER MATCH SKIP TO FIRST x",
"AFTER MATCH SKIP TO LAST x",
for window_frame in ("", "FINAL ", "RUNNING "):
for row in (
"ONE ROW PER MATCH",
"ALL ROWS PER MATCH",
"ALL ROWS PER MATCH SHOW EMPTY MATCHES",
"ALL ROWS PER MATCH OMIT EMPTY MATCHES",
"ALL ROWS PER MATCH WITH UNMATCHED ROWS",
):
self.validate_identity(
f"""SELECT
for after in (
"AFTER MATCH SKIP",
"AFTER MATCH SKIP PAST LAST ROW",
"AFTER MATCH SKIP TO NEXT ROW",
"AFTER MATCH SKIP TO FIRST x",
"AFTER MATCH SKIP TO LAST x",
):
with self.subTest(
f"MATCH_RECOGNIZE with window frame {window_frame}, rows {row}, after {after}: "
):
self.validate_identity(
f"""SELECT
*
FROM x
MATCH_RECOGNIZE (
@@ -1598,15 +1603,15 @@ MATCH_RECOGNIZE (
ORDER BY
x DESC
MEASURES
y AS b
{window_frame}y AS b
{row}
{after}
PATTERN (^ S1 S2*? ( {{- S3 -}} S4 )+ | PERMUTE(S1, S2){{1,2}} $)
DEFINE
x AS y
)""",
pretty=True,
)
pretty=True,
)
def test_show_users(self):
self.validate_identity("SHOW USERS")

View file

@@ -2,6 +2,7 @@ from unittest import mock
from sqlglot import exp, parse_one
from sqlglot.dialects.dialect import Dialects
from sqlglot.helper import logger as helper_logger
from tests.dialects.test_dialect import Validator
@@ -223,17 +224,16 @@ TBLPROPERTIES (
)
def test_spark(self):
self.validate_identity("any_value(col, true)", "ANY_VALUE(col) IGNORE NULLS")
self.validate_identity("first(col, true)", "FIRST(col) IGNORE NULLS")
self.validate_identity("first_value(col, true)", "FIRST_VALUE(col) IGNORE NULLS")
self.validate_identity("last(col, true)", "LAST(col) IGNORE NULLS")
self.validate_identity("last_value(col, true)", "LAST_VALUE(col) IGNORE NULLS")
self.assertEqual(
parse_one("REFRESH TABLE t", read="spark").assert_is(exp.Refresh).sql(dialect="spark"),
"REFRESH TABLE t",
)
self.validate_identity("any_value(col, true)", "ANY_VALUE(col) IGNORE NULLS")
self.validate_identity("first(col, true)", "FIRST(col) IGNORE NULLS")
self.validate_identity("first_value(col, true)", "FIRST_VALUE(col) IGNORE NULLS")
self.validate_identity("last(col, true)", "LAST(col) IGNORE NULLS")
self.validate_identity("last_value(col, true)", "LAST_VALUE(col) IGNORE NULLS")
self.validate_identity("DESCRIBE EXTENDED db.table")
self.validate_identity("SELECT * FROM test TABLESAMPLE (50 PERCENT)")
self.validate_identity("SELECT * FROM test TABLESAMPLE (5 ROWS)")
@@ -284,6 +284,30 @@ TBLPROPERTIES (
"SELECT STR_TO_MAP('a:1,b:2,c:3', ',', ':')",
)
with self.assertLogs(helper_logger):
self.validate_all(
"SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
read={
"databricks": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
},
write={
"databricks": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
"duckdb": "SELECT ([1, 2, 3])[3]",
"spark": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
},
)
self.validate_all(
"SELECT TRY_ELEMENT_AT(MAP(1, 'a', 2, 'b'), 2)",
read={
"databricks": "SELECT TRY_ELEMENT_AT(MAP(1, 'a', 2, 'b'), 2)",
},
write={
"databricks": "SELECT TRY_ELEMENT_AT(MAP(1, 'a', 2, 'b'), 2)",
"duckdb": "SELECT (MAP([1, 2], ['a', 'b'])[2])[1]",
"spark": "SELECT TRY_ELEMENT_AT(MAP(1, 'a', 2, 'b'), 2)",
},
)
self.validate_all(
"SELECT SPLIT('123|789', '\\\\|')",
read={

View file

@@ -210,3 +210,92 @@ class TestTeradata(Validator):
"teradata": "TRYCAST('-2.5' AS DECIMAL(5, 2))",
},
)
def test_time(self):
self.validate_all(
"CURRENT_TIMESTAMP",
read={
"teradata": "CURRENT_TIMESTAMP",
"snowflake": "CURRENT_TIMESTAMP()",
},
)
self.validate_all(
"SELECT '2023-01-01' + INTERVAL '5' YEAR",
read={
"teradata": "SELECT '2023-01-01' + INTERVAL '5' YEAR",
"snowflake": "SELECT DATEADD(YEAR, 5, '2023-01-01')",
},
)
self.validate_all(
"SELECT '2023-01-01' - INTERVAL '5' YEAR",
read={
"teradata": "SELECT '2023-01-01' - INTERVAL '5' YEAR",
"snowflake": "SELECT DATEADD(YEAR, -5, '2023-01-01')",
},
)
self.validate_all(
"SELECT '2023-01-01' - INTERVAL '5' YEAR",
read={
"teradata": "SELECT '2023-01-01' - INTERVAL '5' YEAR",
"sqlite": "SELECT DATE_SUB('2023-01-01', 5, YEAR)",
},
)
self.validate_all(
"SELECT '2023-01-01' + INTERVAL '5' YEAR",
read={
"teradata": "SELECT '2023-01-01' + INTERVAL '5' YEAR",
"sqlite": "SELECT DATE_SUB('2023-01-01', -5, YEAR)",
},
)
self.validate_all(
"SELECT (90 * INTERVAL '1' DAY)",
read={
"teradata": "SELECT (90 * INTERVAL '1' DAY)",
"snowflake": "SELECT INTERVAL '1' QUARTER",
},
)
self.validate_all(
"SELECT (7 * INTERVAL '1' DAY)",
read={
"teradata": "SELECT (7 * INTERVAL '1' DAY)",
"snowflake": "SELECT INTERVAL '1' WEEK",
},
)
self.validate_all(
"SELECT '2023-01-01' + (90 * INTERVAL '5' DAY)",
read={
"teradata": "SELECT '2023-01-01' + (90 * INTERVAL '5' DAY)",
"snowflake": "SELECT DATEADD(QUARTER, 5, '2023-01-01')",
},
)
self.validate_all(
"SELECT '2023-01-01' + (7 * INTERVAL '5' DAY)",
read={
"teradata": "SELECT '2023-01-01' + (7 * INTERVAL '5' DAY)",
"snowflake": "SELECT DATEADD(WEEK, 5, '2023-01-01')",
},
)
self.validate_all(
"CAST(TO_CHAR(x, 'Q') AS INT)",
read={
"teradata": "CAST(TO_CHAR(x, 'Q') AS INT)",
"snowflake": "DATE_PART(QUARTER, x)",
"bigquery": "EXTRACT(QUARTER FROM x)",
},
)
self.validate_all(
"EXTRACT(MONTH FROM x)",
read={
"teradata": "EXTRACT(MONTH FROM x)",
"snowflake": "DATE_PART(MONTH, x)",
"bigquery": "EXTRACT(MONTH FROM x)",
},
)
self.validate_all(
"CAST(TO_CHAR(x, 'Q') AS INT)",
read={
"snowflake": "quarter(x)",
"teradata": "CAST(TO_CHAR(x, 'Q') AS INT)",
},
)