Merging upstream version 12.2.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 15:53:39 +01:00
parent fffa0d5761
commit 62b2b24d3b
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
100 changed files with 35022 additions and 30936 deletions

@@ -215,19 +215,37 @@ TBLPROPERTIES (
         self.validate_identity("SPLIT(str, pattern, lim)")
         self.validate_all(
-            "BOOLEAN(x)",
-            write={
-                "": "CAST(x AS BOOLEAN)",
-                "spark": "CAST(x AS BOOLEAN)",
+            "SELECT * FROM produce PIVOT(SUM(produce.sales) FOR quarter IN ('Q1', 'Q2'))",
+            read={
+                "snowflake": "SELECT * FROM produce PIVOT (SUM(produce.sales) FOR produce.quarter IN ('Q1', 'Q2'))",
             },
         )
         self.validate_all(
-            "INT(x)",
-            write={
-                "": "CAST(x AS INT)",
-                "spark": "CAST(x AS INT)",
+            "SELECT * FROM produce AS p PIVOT(SUM(p.sales) AS sales FOR quarter IN ('Q1' AS Q1, 'Q2' AS Q1))",
+            read={
+                "bigquery": "SELECT * FROM produce AS p PIVOT(SUM(p.sales) AS sales FOR p.quarter IN ('Q1' AS Q1, 'Q2' AS Q1))",
             },
         )
+        self.validate_all(
+            "SELECT DATEDIFF(MONTH, '2020-01-01', '2020-03-05')",
+            write={
+                "databricks": "SELECT DATEDIFF(MONTH, TO_DATE('2020-01-01'), TO_DATE('2020-03-05'))",
+                "hive": "SELECT MONTHS_BETWEEN(TO_DATE('2020-03-05'), TO_DATE('2020-01-01'))",
+                "presto": "SELECT DATE_DIFF('MONTH', CAST(SUBSTR(CAST('2020-01-01' AS VARCHAR), 1, 10) AS DATE), CAST(SUBSTR(CAST('2020-03-05' AS VARCHAR), 1, 10) AS DATE))",
+                "spark": "SELECT DATEDIFF(MONTH, TO_DATE('2020-01-01'), TO_DATE('2020-03-05'))",
+                "spark2": "SELECT MONTHS_BETWEEN(TO_DATE('2020-03-05'), TO_DATE('2020-01-01'))",
+                "trino": "SELECT DATE_DIFF('MONTH', CAST(SUBSTR(CAST('2020-01-01' AS VARCHAR), 1, 10) AS DATE), CAST(SUBSTR(CAST('2020-03-05' AS VARCHAR), 1, 10) AS DATE))",
+            },
+        )
+        for data_type in ("BOOLEAN", "DATE", "DOUBLE", "FLOAT", "INT", "TIMESTAMP"):
+            self.validate_all(
+                f"{data_type}(x)",
+                write={
+                    "": f"CAST(x AS {data_type})",
+                    "spark": f"CAST(x AS {data_type})",
+                },
+            )
         self.validate_all(
             "STRING(x)",
             write={
@@ -235,20 +253,7 @@ TBLPROPERTIES (
                 "spark": "CAST(x AS STRING)",
             },
         )
-        self.validate_all(
-            "DATE(x)",
-            write={
-                "": "CAST(x AS DATE)",
-                "spark": "CAST(x AS DATE)",
-            },
-        )
-        self.validate_all(
-            "TIMESTAMP(x)",
-            write={
-                "": "CAST(x AS TIMESTAMP)",
-                "spark": "CAST(x AS TIMESTAMP)",
-            },
-        )
         self.validate_all(
             "CAST(x AS TIMESTAMP)", read={"trino": "CAST(x AS TIMESTAMP(6) WITH TIME ZONE)"}
         )
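
The new DATEDIFF assertions can be reproduced outside the test harness with sqlglot's public transpile API. The snippet below is a minimal sketch, assuming this hunk belongs to sqlglot's Spark dialect test suite (the changed file's path is not visible in this view); the exact strings it prints depend on the installed sqlglot version.

# Hypothetical reproduction of the DATEDIFF test case above; not part of this commit.
import sqlglot

spark_sql = "SELECT DATEDIFF(MONTH, '2020-01-01', '2020-03-05')"

# transpile() parses the statement with the "spark" dialect and re-renders it
# in each target dialect, returning one SQL string per input statement.
for dialect in ("databricks", "hive", "presto", "spark2", "trino"):
    print(dialect, "->", sqlglot.transpile(spark_sql, read="spark", write=dialect)[0])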