
Merging upstream version 26.17.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-05-12 06:51:01 +02:00
parent 85ae79ba39
commit 7a05a01838
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
63 changed files with 16004 additions and 15816 deletions

View file

@@ -236,6 +236,10 @@ LANGUAGE js AS
         self.validate_identity(
             "CREATE OR REPLACE VIEW test (tenant_id OPTIONS (description='Test description on table creation')) AS SELECT 1 AS tenant_id, 1 AS customer_id",
         )
+        self.validate_identity(
+            'SELECT r"\\t"',
+            "SELECT '\\\\t'",
+        )
         self.validate_identity(
             "ARRAY(SELECT AS STRUCT e.x AS y, e.z AS bla FROM UNNEST(bob))::ARRAY<STRUCT<y STRING, bro NUMERIC>>",
             "CAST(ARRAY(SELECT AS STRUCT e.x AS y, e.z AS bla FROM UNNEST(bob)) AS ARRAY<STRUCT<y STRING, bro NUMERIC>>)",
@@ -1023,8 +1027,8 @@ LANGUAGE js AS
             r'r"""/\*.*\*/"""',
             write={
                 "bigquery": r"'/\\*.*\\*/'",
-                "duckdb": r"'/\\*.*\\*/'",
-                "presto": r"'/\\*.*\\*/'",
+                "duckdb": r"'/\*.*\*/'",
+                "presto": r"'/\*.*\*/'",
                 "hive": r"'/\\*.*\\*/'",
                 "spark": r"'/\\*.*\\*/'",
             },
@@ -1033,8 +1037,8 @@ LANGUAGE js AS
             r'R"""/\*.*\*/"""',
             write={
                 "bigquery": r"'/\\*.*\\*/'",
-                "duckdb": r"'/\\*.*\\*/'",
-                "presto": r"'/\\*.*\\*/'",
+                "duckdb": r"'/\*.*\*/'",
+                "presto": r"'/\*.*\*/'",
                 "hive": r"'/\\*.*\\*/'",
                 "spark": r"'/\\*.*\\*/'",
             },
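
The two hunks above apply the same fix for both the r and R raw-string spellings: DuckDB and Presto do not treat backslashes in string literals as escapes, so doubling them there was wrong, while Hive and Spark still require the doubling. A sketch of checking one corrected expectation, assuming sqlglot is installed:

import sqlglot

sql = r'SELECT r"""/\*.*\*/"""'  # a BigQuery raw triple-quoted string

# Per the diff: backslashes stay single for DuckDB, doubled for Spark.
print(sqlglot.transpile(sql, read="bigquery", write="duckdb")[0])  # SELECT '/\*.*\*/'
print(sqlglot.transpile(sql, read="bigquery", write="spark")[0])   # SELECT '/\\*.*\\*/'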

View file

@@ -167,6 +167,7 @@ class TestClickhouse(Validator):
         self.validate_identity(
             "CREATE MATERIALIZED VIEW test_view ON CLUSTER '{cluster}' (id UInt8) ENGINE=AggregatingMergeTree() ORDER BY tuple() AS SELECT * FROM test_data"
         )
+        self.validate_identity("CREATE TABLE test (id UInt8) ENGINE=Null()")
         self.validate_identity(
             "CREATE MATERIALIZED VIEW test_view ON CLUSTER cl1 TO table1 AS SELECT * FROM test_data"
         )
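
The one-line addition covers ClickHouse's Null table engine; validate_identity asserts that the statement parses and regenerates byte-for-byte. The same round trip, sketched with the public API:

import sqlglot

ddl = "CREATE TABLE test (id UInt8) ENGINE=Null()"
# Identity check: parse as ClickHouse and expect the identical SQL back.
assert sqlglot.transpile(ddl, read="clickhouse", write="clickhouse")[0] == ddl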

View file

@@ -338,3 +338,8 @@ class TestDatabricks(Validator):
         self.validate_identity(
             "ANALYZE TABLE ctlg.db.tbl PARTITION(foo = 'foo', bar = 'bar') COMPUTE STATISTICS NOSCAN"
         )
+
+    def test_udf_environment_property(self):
+        self.validate_identity(
+            """CREATE FUNCTION a() ENVIRONMENT (dependencies = '["foo1==1", "foo2==2"]', environment_version = 'None')"""
+        )
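
The new test method exercises Databricks' ENVIRONMENT clause on CREATE FUNCTION, which carries dependency pins for Python UDFs. The equivalent identity round trip, as a sketch:

import sqlglot

ddl = (
    "CREATE FUNCTION a() ENVIRONMENT "
    "(dependencies = '[\"foo1==1\", \"foo2==2\"]', environment_version = 'None')"
)
# validate_identity semantics: the Databricks dialect must reproduce the input.
assert sqlglot.transpile(ddl, read="databricks", write="databricks")[0] == ddl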

View file

@@ -428,6 +428,7 @@ class TestDuckDB(Validator):
             "SELECT STRFTIME(CAST('2020-01-01' AS TIMESTAMP), CONCAT('%Y', '%m'))",
             write={
                 "duckdb": "SELECT STRFTIME(CAST('2020-01-01' AS TIMESTAMP), CONCAT('%Y', '%m'))",
+                "spark": "SELECT DATE_FORMAT(CAST('2020-01-01' AS TIMESTAMP_NTZ), CONCAT(COALESCE('yyyy', ''), COALESCE('MM', '')))",
                 "tsql": "SELECT FORMAT(CAST('2020-01-01' AS DATETIME2), CONCAT('yyyy', 'MM'))",
             },
         )
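
The added spark expectation shows the interesting part of the mapping: each C-style strftime token ('%Y', '%m') is rewritten to its Spark equivalent ('yyyy', 'MM') inside the CONCAT arguments, with COALESCE guarding NULLs. A sketch, assuming sqlglot is installed:

import sqlglot

sql = "SELECT STRFTIME(CAST('2020-01-01' AS TIMESTAMP), CONCAT('%Y', '%m'))"
# The format tokens are translated even though they sit inside CONCAT.
print(sqlglot.transpile(sql, read="duckdb", write="spark")[0])
# SELECT DATE_FORMAT(CAST('2020-01-01' AS TIMESTAMP_NTZ), CONCAT(COALESCE('yyyy', ''), COALESCE('MM', '')))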
@@ -1110,6 +1111,28 @@ class TestDuckDB(Validator):
             },
         )
+        self.validate_all(
+            "SELECT TIMESTAMP 'foo'",
+            write={
+                "duckdb": "SELECT CAST('foo' AS TIMESTAMP)",
+                "hive": "SELECT CAST('foo' AS TIMESTAMP)",
+                "spark2": "SELECT CAST('foo' AS TIMESTAMP)",
+                "spark": "SELECT CAST('foo' AS TIMESTAMP_NTZ)",
+                "postgres": "SELECT CAST('foo' AS TIMESTAMP)",
+                "mysql": "SELECT CAST('foo' AS DATETIME)",
+                "clickhouse": "SELECT CAST('foo' AS Nullable(DateTime))",
+                "databricks": "SELECT CAST('foo' AS TIMESTAMP_NTZ)",
+                "snowflake": "SELECT CAST('foo' AS TIMESTAMPNTZ)",
+                "redshift": "SELECT CAST('foo' AS TIMESTAMP)",
+                "tsql": "SELECT CAST('foo' AS DATETIME2)",
+                "presto": "SELECT CAST('foo' AS TIMESTAMP)",
+                "trino": "SELECT CAST('foo' AS TIMESTAMP)",
+                "oracle": "SELECT CAST('foo' AS TIMESTAMP)",
+                "bigquery": "SELECT CAST('foo' AS DATETIME)",
+                "starrocks": "SELECT CAST('foo' AS DATETIME)",
+            },
+        )

     def test_sample(self):
         self.validate_identity(
             "SELECT * FROM tbl USING SAMPLE 5",

View file

@@ -1311,7 +1311,7 @@ COMMENT='客户账户表'"""

     def test_timestamp_trunc(self):
         hive_dialects = ("spark", "databricks")
-        for dialect in ("postgres", "snowflake", "duckdb", *hive_dialects):
+        for dialect in ("postgres", "snowflake", *hive_dialects):
             for unit in (
                 "SECOND",
                 "DAY",

View file

@@ -1022,6 +1022,39 @@ class TestSnowflake(Validator):
             },
         )
+        self.validate_all(
+            "DAYOFWEEKISO(foo)",
+            read={
+                "presto": "DAY_OF_WEEK(foo)",
+                "trino": "DAY_OF_WEEK(foo)",
+            },
+            write={
+                "snowflake": "DAYOFWEEKISO(foo)",
+            },
+        )
+        self.validate_all(
+            "DAYOFWEEKISO(foo)",
+            read={
+                "presto": "DOW(foo)",
+                "trino": "DOW(foo)",
+            },
+            write={
+                "snowflake": "DAYOFWEEKISO(foo)",
+            },
+        )
+        self.validate_all(
+            "DAYOFYEAR(foo)",
+            read={
+                "presto": "DOY(foo)",
+                "trino": "DOY(foo)",
+            },
+            write={
+                "snowflake": "DAYOFYEAR(foo)",
+            },
+        )

     def test_null_treatment(self):
         self.validate_all(
             r"SELECT FIRST_VALUE(TABLE1.COLUMN1) OVER (PARTITION BY RANDOM_COLUMN1, RANDOM_COLUMN2 ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS MY_ALIAS FROM TABLE1",

View file

@@ -1062,6 +1062,16 @@ WHERE
                 "tsql": "ALTER TABLE a ADD b INTEGER, c INTEGER",
             },
         )
+        self.validate_all(
+            "ALTER TABLE a ALTER COLUMN b INTEGER",
+            read={
+                "": "ALTER TABLE a ALTER COLUMN b INT",
+            },
+            write={
+                "": "ALTER TABLE a ALTER COLUMN b SET DATA TYPE INT",
+                "tsql": "ALTER TABLE a ALTER COLUMN b INTEGER",
+            },
+        )
         self.validate_all(
             "CREATE TABLE #mytemp (a INTEGER, b CHAR(2), c TIME(4), d FLOAT(24))",
             write={
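
The new ALTER COLUMN case is asymmetric by design: T-SQL writes the bare type (ALTER COLUMN b INTEGER), while sqlglot's default dialect (the "" key in the test) canonicalizes to SET DATA TYPE. Both directions, sketched under the assumption that an empty dialect string selects the default dialect, as it does in validate_all:

import sqlglot

# T-SQL -> default dialect: the bare type becomes SET DATA TYPE.
print(sqlglot.transpile("ALTER TABLE a ALTER COLUMN b INTEGER", read="tsql", write="")[0])
# ALTER TABLE a ALTER COLUMN b SET DATA TYPE INT

# Default dialect -> T-SQL: SET DATA TYPE collapses back to the bare type.
print(sqlglot.transpile("ALTER TABLE a ALTER COLUMN b INT", read="", write="tsql")[0])
# ALTER TABLE a ALTER COLUMN b INTEGER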