
Merging upstream version 25.16.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 21:52:32 +01:00
parent 7688e2bdf8
commit bad79d1f7c
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
110 changed files with 75353 additions and 68092 deletions


@@ -129,6 +129,16 @@ TBLPROPERTIES (
                 "spark": "ALTER TABLE StudentInfo DROP COLUMNS (LastName, DOB)",
             },
         )
+        self.validate_identity("ALTER VIEW StudentInfoView AS SELECT * FROM StudentInfo")
+        self.validate_identity("ALTER VIEW StudentInfoView AS SELECT LastName FROM StudentInfo")
+        self.validate_identity("ALTER VIEW StudentInfoView RENAME TO StudentInfoViewRenamed")
+        self.validate_identity(
+            "ALTER VIEW StudentInfoView SET TBLPROPERTIES ('key1'='val1', 'key2'='val2')"
+        )
+        self.validate_identity(
+            "ALTER VIEW StudentInfoView UNSET TBLPROPERTIES ('key1', 'key2')",
+            check_command_warning=True,
+        )
 
     def test_to_date(self):
         self.validate_all(
@@ -297,6 +307,13 @@ TBLPROPERTIES (
             },
         )
+        self.validate_all(
+            "SELECT DATE_FORMAT(DATE '2020-01-01', 'EEEE') AS weekday",
+            write={
+                "presto": "SELECT DATE_FORMAT(CAST(CAST('2020-01-01' AS DATE) AS TIMESTAMP), '%W') AS weekday",
+                "spark": "SELECT DATE_FORMAT(CAST(CAST('2020-01-01' AS DATE) AS TIMESTAMP), 'EEEE') AS weekday",
+            },
+        )
         self.validate_all(
             "SELECT TRY_ELEMENT_AT(MAP(1, 'a', 2, 'b'), 2)",
             read={
@@ -557,7 +574,10 @@ TBLPROPERTIES (
         )
         self.validate_all(
-            "CAST(x AS TIMESTAMP)", read={"trino": "CAST(x AS TIMESTAMP(6) WITH TIME ZONE)"}
+            "CAST(x AS TIMESTAMP)",
+            read={
+                "trino": "CAST(x AS TIMESTAMP(6) WITH TIME ZONE)",
+            },
         )
         self.validate_all(
             "SELECT DATE_ADD(my_date_column, 1)",
@@ -688,6 +708,7 @@ TBLPROPERTIES (
                 "trino": "SELECT DATE_ADD('MONTH', 20, col)",
             },
         )
+        self.validate_identity("DESCRIBE schema.test PARTITION(ds = '2024-01-01')")
 
     def test_bool_or(self):
         self.validate_all(
@@ -805,8 +826,22 @@ TBLPROPERTIES (
                 self.assertEqual(query.sql(name), without_modifiers)
 
     def test_schema_binding_options(self):
-        for schema_binding in ("BINDING", "COMPENSATION", "TYPE EVOLUTION", "EVOLUTION"):
+        for schema_binding in (
+            "BINDING",
+            "COMPENSATION",
+            "TYPE EVOLUTION",
+            "EVOLUTION",
+        ):
             with self.subTest(f"Test roundtrip of VIEW schema binding {schema_binding}"):
                 self.validate_identity(
                     f"CREATE VIEW emp_v WITH SCHEMA {schema_binding} AS SELECT * FROM emp"
                 )
+
+    def test_minus(self):
+        self.validate_all(
+            "SELECT * FROM db.table1 MINUS SELECT * FROM db.table2",
+            write={
+                "spark": "SELECT * FROM db.table1 EXCEPT SELECT * FROM db.table2",
+                "databricks": "SELECT * FROM db.table1 EXCEPT SELECT * FROM db.table2",
+            },
+        )
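
The new test_minus case documents that Spark's MINUS keyword is normalized to the standard EXCEPT when generating Spark or Databricks SQL. A minimal sketch of the same behavior through sqlglot's public transpile API, illustrative only and not part of the commit:

import sqlglot

# MINUS is parsed as a set operation by the Spark dialect; both the Spark and
# Databricks generators emit the standard EXCEPT keyword, as test_minus asserts.
sql = "SELECT * FROM db.table1 MINUS SELECT * FROM db.table2"

for dialect in ("spark", "databricks"):
    print(sqlglot.transpile(sql, read="spark", write=dialect)[0])
    # expected: SELECT * FROM db.table1 EXCEPT SELECT * FROM db.table2

The same read/write pattern covers the other additions in this diff, for example transpiling Trino's CAST(x AS TIMESTAMP(6) WITH TIME ZONE) into Spark's plain CAST(x AS TIMESTAMP).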