
Adding upstream version 20.1.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 21:16:46 +01:00
parent 6a89523da4
commit 5bd573dda1
Signed by: daniel (GPG key ID: FBB4F0E80A80222F)
127 changed files with 73384 additions and 73067 deletions

@@ -315,6 +315,7 @@ class TestHive(Validator):
self.validate_all(
"DATE_FORMAT('2020-01-01', 'yyyy-MM-dd HH:mm:ss')",
write={
"bigquery": "FORMAT_DATE('%Y-%m-%d %H:%M:%S', CAST('2020-01-01' AS DATETIME))",
"duckdb": "STRFTIME(CAST('2020-01-01' AS TIMESTAMP), '%Y-%m-%d %H:%M:%S')",
"presto": "DATE_FORMAT(CAST('2020-01-01' AS TIMESTAMP), '%Y-%m-%d %T')",
"hive": "DATE_FORMAT(CAST('2020-01-01' AS TIMESTAMP), 'yyyy-MM-dd HH:mm:ss')",
@@ -324,21 +325,29 @@ class TestHive(Validator):
self.validate_all(
"DATE_ADD('2020-01-01', 1)",
write={
"": "TS_OR_DS_ADD('2020-01-01', 1, DAY)",
"bigquery": "DATE_ADD(CAST(CAST('2020-01-01' AS DATETIME) AS DATE), INTERVAL 1 DAY)",
"duckdb": "CAST('2020-01-01' AS DATE) + INTERVAL 1 DAY",
"presto": "DATE_ADD('DAY', 1, CAST(CAST('2020-01-01' AS TIMESTAMP) AS DATE))",
"hive": "DATE_ADD('2020-01-01', 1)",
"presto": "DATE_ADD('DAY', 1, CAST(CAST('2020-01-01' AS TIMESTAMP) AS DATE))",
"redshift": "DATEADD(DAY, 1, '2020-01-01')",
"snowflake": "DATEADD(DAY, 1, CAST(CAST('2020-01-01' AS TIMESTAMPNTZ) AS DATE))",
"spark": "DATE_ADD('2020-01-01', 1)",
"": "TS_OR_DS_ADD('2020-01-01', 1, 'DAY')",
"tsql": "DATEADD(DAY, 1, CAST(CAST('2020-01-01' AS DATETIME2) AS DATE))",
},
)
self.validate_all(
"DATE_SUB('2020-01-01', 1)",
write={
"": "TS_OR_DS_ADD('2020-01-01', 1 * -1, DAY)",
"bigquery": "DATE_ADD(CAST(CAST('2020-01-01' AS DATETIME) AS DATE), INTERVAL (1 * -1) DAY)",
"duckdb": "CAST('2020-01-01' AS DATE) + INTERVAL (1 * -1) DAY",
"presto": "DATE_ADD('DAY', 1 * -1, CAST(CAST('2020-01-01' AS TIMESTAMP) AS DATE))",
"hive": "DATE_ADD('2020-01-01', 1 * -1)",
"presto": "DATE_ADD('DAY', 1 * -1, CAST(CAST('2020-01-01' AS TIMESTAMP) AS DATE))",
"redshift": "DATEADD(DAY, 1 * -1, '2020-01-01')",
"snowflake": "DATEADD(DAY, 1 * -1, CAST(CAST('2020-01-01' AS TIMESTAMPNTZ) AS DATE))",
"spark": "DATE_ADD('2020-01-01', 1 * -1)",
"": "TS_OR_DS_ADD('2020-01-01', 1 * -1, 'DAY')",
"tsql": "DATEADD(DAY, 1 * -1, CAST(CAST('2020-01-01' AS DATETIME2) AS DATE))",
},
)
self.validate_all("DATE_ADD('2020-01-01', -1)", read={"": "DATE_SUB('2020-01-01', 1)"})
@@ -351,8 +360,8 @@ class TestHive(Validator):
write={
"duckdb": "DATE_DIFF('day', CAST(x AS DATE), CAST(CAST(y AS DATE) AS DATE))",
"presto": "DATE_DIFF('day', CAST(CAST(x AS TIMESTAMP) AS DATE), CAST(CAST(CAST(CAST(y AS TIMESTAMP) AS DATE) AS TIMESTAMP) AS DATE))",
"hive": "DATEDIFF(TO_DATE(TO_DATE(y)), TO_DATE(x))",
"spark": "DATEDIFF(TO_DATE(TO_DATE(y)), TO_DATE(x))",
"hive": "DATEDIFF(TO_DATE(y), TO_DATE(x))",
"spark": "DATEDIFF(TO_DATE(y), TO_DATE(x))",
"": "DATEDIFF(TS_OR_DS_TO_DATE(TS_OR_DS_TO_DATE(y)), TS_OR_DS_TO_DATE(x))",
},
)
@@ -522,11 +531,16 @@ class TestHive(Validator):
)
self.validate_all(
"ARRAY_CONTAINS(x, 1)",
read={
"duckdb": "LIST_HAS(x, 1)",
"snowflake": "ARRAY_CONTAINS(1, x)",
},
write={
"duckdb": "ARRAY_CONTAINS(x, 1)",
"presto": "CONTAINS(x, 1)",
"hive": "ARRAY_CONTAINS(x, 1)",
"spark": "ARRAY_CONTAINS(x, 1)",
"snowflake": "ARRAY_CONTAINS(1, x)",
},
)
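
The new read={} entries assert the reverse direction: parsing the other dialect's syntax should yield the same expression as the Hive input. A hedged sketch of what that means at the API level, again assuming sqlglot:

import sqlglot

# DuckDB's LIST_HAS and Snowflake's ARRAY_CONTAINS (arguments reversed) should
# both come back out as Hive's ARRAY_CONTAINS(x, 1), per the read={} entries above.
print(sqlglot.transpile("LIST_HAS(x, 1)", read="duckdb", write="hive")[0])
print(sqlglot.transpile("ARRAY_CONTAINS(1, x)", read="snowflake", write="hive")[0])
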
self.validate_all(
@@ -687,7 +701,7 @@ class TestHive(Validator):
"x div y",
write={
"duckdb": "x // y",
"presto": "CAST(x / y AS INTEGER)",
"presto": "CAST(CAST(x AS DOUBLE) / y AS INTEGER)",
"hive": "CAST(x / y AS INT)",
"spark": "CAST(x / y AS INT)",
},
@@ -707,11 +721,15 @@ class TestHive(Validator):
self.validate_all(
"COLLECT_SET(x)",
read={
"doris": "COLLECT_SET(x)",
"presto": "SET_AGG(x)",
"snowflake": "ARRAY_UNIQUE_AGG(x)",
},
write={
"presto": "SET_AGG(x)",
"doris": "COLLECT_SET(x)",
"hive": "COLLECT_SET(x)",
"presto": "SET_AGG(x)",
"snowflake": "ARRAY_UNIQUE_AGG(x)",
"spark": "COLLECT_SET(x)",
},
)
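
COLLECT_SET is one of the aggregations whose name differs per engine (SET_AGG in Presto, ARRAY_UNIQUE_AGG in Snowflake, COLLECT_SET in Hive/Spark/Doris). A small sketch of the mapping the test pins down, under the same sqlglot >= 20.1.0 assumption:

import sqlglot

# The distinct-set aggregation is renamed per target dialect.
query = "SELECT COLLECT_SET(x) FROM t"
for dialect in ("presto", "snowflake", "spark"):
    print(dialect, sqlglot.transpile(query, read="hive", write=dialect)[0])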