1
0
Fork 0

Merging upstream version 26.1.3.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 21:59:50 +01:00
parent 829d661a08
commit c8d4d2df63
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
119 changed files with 71635 additions and 68059 deletions

View file

@@ -29,6 +29,7 @@ class TestClickhouse(Validator):
self.assertEqual(expr.sql(dialect="clickhouse"), "COUNT(x)")
self.assertIsNone(expr._meta)
self.validate_identity("WITH arrayJoin([(1, [2, 3])]) AS arr SELECT arr")
self.validate_identity("CAST(1 AS Bool)")
self.validate_identity("SELECT toString(CHAR(104.1, 101, 108.9, 108.9, 111, 32))")
self.validate_identity("@macro").assert_is(exp.Parameter).this.assert_is(exp.Var)
@@ -549,6 +550,9 @@ class TestClickhouse(Validator):
"SELECT name FROM data WHERE NOT ((SELECT DISTINCT name FROM data) IS NULL)",
)
self.validate_identity("SELECT 1_2_3_4_5", "SELECT 12345")
self.validate_identity("SELECT 1_b", "SELECT 1_b")
def test_clickhouse_values(self):
values = exp.select("*").from_(
exp.values([exp.tuple_(1, 2, 3)], alias="subq", columns=["a", "b", "c"])

View file

@@ -520,6 +520,19 @@ class TestDialect(Validator):
},
)
def test_is_ascii(self):
self.validate_all(
"SELECT IS_ASCII(x)",
write={
"": "SELECT IS_ASCII(x)",
"sqlite": "SELECT (NOT x GLOB CAST(x'2a5b5e012d7f5d2a' AS TEXT))",
"mysql": "SELECT REGEXP_LIKE(x, '^[[:ascii:]]*$')",
"postgres": "SELECT (x ~ '^[[:ascii:]]*$')",
"tsql": "SELECT (PATINDEX('%[^' + CHAR(0x00) + '-' + CHAR(0x7f) + ']%' COLLATE Latin1_General_BIN, x) = 0)",
"oracle": "SELECT NVL(REGEXP_LIKE(x, '^[' || CHR(1) || '-' || CHR(127) || ']*$'), TRUE)",
},
)
def test_nvl2(self):
self.validate_all(
"SELECT NVL2(a, b, c)",
@@ -1669,11 +1682,11 @@ class TestDialect(Validator):
},
)
self.validate_all(
"POSITION(needle in haystack)",
"POSITION(needle IN haystack)",
write={
"drill": "STRPOS(haystack, needle)",
"duckdb": "STRPOS(haystack, needle)",
"postgres": "STRPOS(haystack, needle)",
"postgres": "POSITION(needle IN haystack)",
"presto": "STRPOS(haystack, needle)",
"spark": "LOCATE(needle, haystack)",
"clickhouse": "position(haystack, needle)",
@@ -1686,7 +1699,7 @@ class TestDialect(Validator):
write={
"drill": "STRPOS(haystack, needle)",
"duckdb": "STRPOS(haystack, needle)",
"postgres": "STRPOS(haystack, needle)",
"postgres": "POSITION(needle IN haystack)",
"presto": "STRPOS(haystack, needle)",
"bigquery": "STRPOS(haystack, needle)",
"spark": "LOCATE(needle, haystack)",

View file

@@ -276,12 +276,6 @@ class TestDuckDB(Validator):
self.validate_identity("SELECT UNNEST(col, recursive := TRUE) FROM t")
self.validate_identity("VAR_POP(a)")
self.validate_identity("SELECT * FROM foo ASOF LEFT JOIN bar ON a = b")
self.validate_identity("PIVOT Cities ON Year USING SUM(Population)")
self.validate_identity("PIVOT Cities ON Year USING FIRST(Population)")
self.validate_identity("PIVOT Cities ON Year USING SUM(Population) GROUP BY Country")
self.validate_identity("PIVOT Cities ON Country, Name USING SUM(Population)")
self.validate_identity("PIVOT Cities ON Country || '_' || Name USING SUM(Population)")
self.validate_identity("PIVOT Cities ON Year USING SUM(Population) GROUP BY Country, Name")
self.validate_identity("SELECT {'a': 1} AS x")
self.validate_identity("SELECT {'a': {'b': {'c': 1}}, 'd': {'e': 2}} AS x")
self.validate_identity("SELECT {'x': 1, 'y': 2, 'z': 3}")
@ -1415,3 +1409,42 @@ class TestDuckDB(Validator):
self.validate_identity("DETACH IF EXISTS file")
self.validate_identity("DETACH DATABASE db", "DETACH db")
def test_simplified_pivot_unpivot(self):
self.validate_identity("PIVOT Cities ON Year USING SUM(Population)")
self.validate_identity("PIVOT Cities ON Year USING FIRST(Population)")
self.validate_identity("PIVOT Cities ON Year USING SUM(Population) GROUP BY Country")
self.validate_identity("PIVOT Cities ON Country, Name USING SUM(Population)")
self.validate_identity("PIVOT Cities ON Country || '_' || Name USING SUM(Population)")
self.validate_identity("PIVOT Cities ON Year USING SUM(Population) GROUP BY Country, Name")
self.validate_identity("UNPIVOT (SELECT 1 AS col1, 2 AS col2) ON foo, bar")
self.validate_identity(
"UNPIVOT monthly_sales ON jan, feb, mar, apr, may, jun INTO NAME month VALUE sales"
)
self.validate_identity(
"UNPIVOT monthly_sales ON COLUMNS(* EXCLUDE (empid, dept)) INTO NAME month VALUE sales"
)
self.validate_identity(
"UNPIVOT monthly_sales ON (jan, feb, mar) AS q1, (apr, may, jun) AS q2 INTO NAME quarter VALUE month_1_sales, month_2_sales, month_3_sales"
)
self.validate_identity(
"WITH unpivot_alias AS (UNPIVOT monthly_sales ON COLUMNS(* EXCLUDE (empid, dept)) INTO NAME month VALUE sales) SELECT * FROM unpivot_alias"
)
self.validate_identity(
"SELECT * FROM (UNPIVOT monthly_sales ON COLUMNS(* EXCLUDE (empid, dept)) INTO NAME month VALUE sales) AS unpivot_alias"
)
def test_from_first_with_parentheses(self):
self.validate_identity(
"CREATE TABLE t1 AS (FROM t2 SELECT foo1, foo2)",
"CREATE TABLE t1 AS (SELECT foo1, foo2 FROM t2)",
)
self.validate_identity(
"FROM (FROM t1 SELECT foo1, foo2)",
"SELECT * FROM (SELECT foo1, foo2 FROM t1)",
)
self.validate_identity(
"WITH t1 AS (FROM (FROM t2 SELECT foo1, foo2)) FROM t1",
"WITH t1 AS (SELECT * FROM (SELECT foo1, foo2 FROM t2)) SELECT * FROM t1",
)

View file

@@ -806,6 +806,8 @@ class TestHive(Validator):
},
)
self.validate_identity("SELECT 1_2")
def test_escapes(self) -> None:
self.validate_identity("'\n'", "'\\n'")
self.validate_identity("'\\n'")

View file

@@ -332,6 +332,8 @@ class TestMySQL(Validator):
write={
"mysql": "CHAR(10)",
"presto": "CHR(10)",
"sqlite": "CHAR(10)",
"tsql": "CHAR(10)",
},
)
@ -723,6 +725,7 @@ class TestMySQL(Validator):
write={
"duckdb": "SELECT LENGTH('foo')",
"mysql": "SELECT CHAR_LENGTH('foo')",
"postgres": "SELECT LENGTH('foo')",
},
)

View file

@@ -45,6 +45,7 @@ class TestOracle(Validator):
self.validate_identity("SELECT COUNT(*) * 10 FROM orders SAMPLE (10) SEED (1)")
self.validate_identity("SELECT * FROM V$SESSION")
self.validate_identity("SELECT TO_DATE('January 15, 1989, 11:00 A.M.')")
self.validate_identity("SELECT INSTR(haystack, needle)")
self.validate_identity(
"SELECT * FROM test UNPIVOT INCLUDE NULLS (value FOR Description IN (col AS 'PREFIX ' || CHR(38) || ' SUFFIX'))"
)

View file

@@ -49,6 +49,10 @@ class TestPostgres(Validator):
self.validate_identity("CAST(x AS DATERANGE)")
self.validate_identity("CAST(x AS DATEMULTIRANGE)")
self.validate_identity("x$")
self.validate_identity("LENGTH(x)")
self.validate_identity("LENGTH(x, utf8)")
self.validate_identity("CHAR_LENGTH(x)", "LENGTH(x)")
self.validate_identity("CHARACTER_LENGTH(x)", "LENGTH(x)")
self.validate_identity("SELECT ARRAY[1, 2, 3]")
self.validate_identity("SELECT ARRAY(SELECT 1)")
self.validate_identity("STRING_AGG(x, y)")
@ -71,6 +75,9 @@ class TestPostgres(Validator):
self.validate_identity("EXEC AS myfunc @id = 123", check_command_warning=True)
self.validate_identity("SELECT CURRENT_USER")
self.validate_identity("SELECT * FROM ONLY t1")
self.validate_identity(
"SELECT id, name FROM XMLTABLE('/root/user' PASSING xml_data COLUMNS id INT PATH '@id', name TEXT PATH 'name/text()') AS t"
)
self.validate_identity(
"SELECT * FROM t WHERE some_column >= CURRENT_DATE + INTERVAL '1 day 1 hour' AND some_another_column IS TRUE"
)
@ -874,6 +881,9 @@ class TestPostgres(Validator):
self.validate_identity("ALTER TABLE t1 SET ACCESS METHOD method")
self.validate_identity("ALTER TABLE t1 SET TABLESPACE tablespace")
self.validate_identity("ALTER TABLE t1 SET (fillfactor = 5, autovacuum_enabled = TRUE)")
self.validate_identity(
"INSERT INTO newtable AS t(a, b, c) VALUES (1, 2, 3) ON CONFLICT(c) DO UPDATE SET a = t.a + 1 WHERE t.a < 1"
)
self.validate_identity(
"ALTER TABLE tested_table ADD CONSTRAINT unique_example UNIQUE (column_name) NOT VALID"
)

View file

@@ -21,6 +21,7 @@ class TestSnowflake(Validator):
expr.selects[0].assert_is(exp.AggFunc)
self.assertEqual(expr.sql(dialect="snowflake"), "SELECT APPROX_TOP_K(C4, 3, 5) FROM t")
self.validate_identity("INSERT INTO test VALUES (x'48FAF43B0AFCEF9B63EE3A93EE2AC2')")
self.validate_identity("exclude := [foo]")
self.validate_identity("SELECT CAST([1, 2, 3] AS VECTOR(FLOAT, 3))")
self.validate_identity("SELECT CONNECT_BY_ROOT test AS test_column_alias")
@ -495,6 +496,7 @@ class TestSnowflake(Validator):
"snowflake": "SELECT BOOLAND_AGG(c1), BOOLAND_AGG(c2) FROM test",
"spark": "SELECT BOOL_AND(c1), BOOL_AND(c2) FROM test",
"sqlite": "SELECT MIN(c1), MIN(c2) FROM test",
"mysql": "SELECT MIN(c1), MIN(c2) FROM test",
},
)
for suffix in (
@ -2358,3 +2360,11 @@ SINGLE = TRUE""",
self.assertEqual(ast.sql("snowflake"), query)
self.assertEqual(len(list(ast.find_all(exp.Column))), 1)
self.assertEqual(window.this.sql("snowflake"), "db.schema.FUNC(a)")
def test_offset_without_limit(self):
self.validate_all(
"SELECT 1 ORDER BY 1 LIMIT NULL OFFSET 0",
read={
"trino": "SELECT 1 ORDER BY 1 OFFSET 0",
},
)

View file

@@ -9,6 +9,8 @@ class TestSpark(Validator):
dialect = "spark"
def test_ddl(self):
self.validate_identity("DROP NAMESPACE my_catalog.my_namespace")
self.validate_identity("CREATE NAMESPACE my_catalog.my_namespace")
self.validate_identity("INSERT OVERWRITE TABLE db1.tb1 TABLE db2.tb2")
self.validate_identity("CREATE TABLE foo AS WITH t AS (SELECT 1 AS col) SELECT col FROM t")
self.validate_identity("CREATE TEMPORARY VIEW test AS SELECT 1")

View file

@@ -92,6 +92,17 @@ class TestSQLite(Validator):
read={"snowflake": "LEAST(x, y, z)"},
write={"snowflake": "LEAST(x, y, z)"},
)
self.validate_all(
"UNICODE(x)",
write={
"": "UNICODE(x)",
"mysql": "ORD(CONVERT(x USING utf32))",
"oracle": "ASCII(UNISTR(x))",
"postgres": "ASCII(x)",
"redshift": "ASCII(x)",
"spark": "ASCII(x)",
},
)
self.validate_identity(
"SELECT * FROM station WHERE city IS NOT ''",
"SELECT * FROM station WHERE NOT city IS ''",

View file

@@ -753,6 +753,16 @@ class TestTSQL(Validator):
},
)
self.validate_all(
"CREATE TABLE t (col1 DATETIME2(2))",
read={
"snowflake": "CREATE TABLE t (col1 TIMESTAMP_NTZ(2))",
},
write={
"tsql": "CREATE TABLE t (col1 DATETIME2(2))",
},
)
def test_types_bin(self):
self.validate_all(
"CAST(x as BIT)",
@ -1220,7 +1230,10 @@ WHERE
def test_datefromparts(self):
self.validate_all(
"SELECT DATEFROMPARTS('2020', 10, 01)",
write={"spark": "SELECT MAKE_DATE('2020', 10, 01)"},
write={
"spark": "SELECT MAKE_DATE('2020', 10, 01)",
"tsql": "SELECT DATEFROMPARTS('2020', 10, 01)",
},
)
def test_datename(self):
@ -2090,3 +2103,27 @@ FROM OPENJSON(@json) WITH (
"oracle": "SELECT NEXT VALUE FOR db.schema.sequence_name",
},
)
# string literals in the DATETRUNC are casted as DATETIME2
def test_datetrunc(self):
self.validate_all(
"SELECT DATETRUNC(month, 'foo')",
write={
"duckdb": "SELECT DATE_TRUNC('MONTH', CAST('foo' AS TIMESTAMP))",
"tsql": "SELECT DATETRUNC(MONTH, CAST('foo' AS DATETIME2))",
},
)
self.validate_all(
"SELECT DATETRUNC(month, foo)",
write={
"duckdb": "SELECT DATE_TRUNC('MONTH', foo)",
"tsql": "SELECT DATETRUNC(MONTH, foo)",
},
)
self.validate_all(
"SELECT DATETRUNC(year, CAST('foo1' AS date))",
write={
"duckdb": "SELECT DATE_TRUNC('YEAR', CAST('foo1' AS DATE))",
"tsql": "SELECT DATETRUNC(YEAR, CAST('foo1' AS DATE))",
},
)