
Merging upstream version 11.7.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Author: Daniel Baumann
Date: 2025-02-13 15:52:09 +01:00
Commit: 8d96084fad
Parent: 0c053462ae
Signed by: daniel (GPG key ID: FBB4F0E80A80222F)

144 changed files with 44104 additions and 39367 deletions


@@ -70,8 +70,8 @@ class TestHive(Validator):
         self.validate_all(
             "1s",
             write={
-                "duckdb": "CAST(1 AS SMALLINT)",
-                "presto": "CAST(1 AS SMALLINT)",
+                "duckdb": "TRY_CAST(1 AS SMALLINT)",
+                "presto": "TRY_CAST(1 AS SMALLINT)",
                 "hive": "CAST(1 AS SMALLINT)",
                 "spark": "CAST(1 AS SHORT)",
             },
@@ -79,8 +79,8 @@ class TestHive(Validator):
         self.validate_all(
             "1S",
             write={
-                "duckdb": "CAST(1 AS SMALLINT)",
-                "presto": "CAST(1 AS SMALLINT)",
+                "duckdb": "TRY_CAST(1 AS SMALLINT)",
+                "presto": "TRY_CAST(1 AS SMALLINT)",
                 "hive": "CAST(1 AS SMALLINT)",
                 "spark": "CAST(1 AS SHORT)",
             },
@@ -88,8 +88,8 @@ class TestHive(Validator):
         self.validate_all(
             "1Y",
             write={
-                "duckdb": "CAST(1 AS TINYINT)",
-                "presto": "CAST(1 AS TINYINT)",
+                "duckdb": "TRY_CAST(1 AS TINYINT)",
+                "presto": "TRY_CAST(1 AS TINYINT)",
                 "hive": "CAST(1 AS TINYINT)",
                 "spark": "CAST(1 AS BYTE)",
             },
@@ -97,8 +97,8 @@ class TestHive(Validator):
         self.validate_all(
             "1L",
             write={
-                "duckdb": "CAST(1 AS BIGINT)",
-                "presto": "CAST(1 AS BIGINT)",
+                "duckdb": "TRY_CAST(1 AS BIGINT)",
+                "presto": "TRY_CAST(1 AS BIGINT)",
                 "hive": "CAST(1 AS BIGINT)",
                 "spark": "CAST(1 AS LONG)",
             },
@@ -106,8 +106,8 @@ class TestHive(Validator):
         self.validate_all(
             "1.0bd",
             write={
-                "duckdb": "CAST(1.0 AS DECIMAL)",
-                "presto": "CAST(1.0 AS DECIMAL)",
+                "duckdb": "TRY_CAST(1.0 AS DECIMAL)",
+                "presto": "TRY_CAST(1.0 AS DECIMAL)",
                 "hive": "CAST(1.0 AS DECIMAL)",
                 "spark": "CAST(1.0 AS DECIMAL)",
             },
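The five hunks above all make the same change: when Hive's suffixed numeric literals (1s, 1S, 1Y, 1L, 1.0bd) are transpiled to DuckDB or Presto, the generated SQL now uses TRY_CAST instead of CAST, while the Hive and Spark outputs keep a plain CAST. A minimal sketch of how this surfaces through sqlglot's public transpile API, with the expected outputs taken from the test expectations above:

import sqlglot

# Hive "1Y" (a TINYINT literal) written for DuckDB, per the updated test.
print(sqlglot.transpile("1Y", read="hive", write="duckdb")[0])
# -> TRY_CAST(1 AS TINYINT)

# Hive "1L" (a BIGINT literal) written for Presto.
print(sqlglot.transpile("1L", read="hive", write="presto")[0])
# -> TRY_CAST(1 AS BIGINT)

# The Hive-to-Hive output is unchanged.
print(sqlglot.transpile("1L", read="hive", write="hive")[0])
# -> CAST(1 AS BIGINT)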
@@ -148,6 +148,9 @@ class TestHive(Validator):
         self.validate_identity(
             """CREATE EXTERNAL TABLE x (y INT) ROW FORMAT SERDE 'serde' ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' WITH SERDEPROPERTIES ('input.regex'='')""",
         )
+        self.validate_identity(
+            """CREATE EXTERNAL TABLE `my_table` (`a7` ARRAY<DATE>) ROW FORMAT SERDE 'a' STORED AS INPUTFORMAT 'b' OUTPUTFORMAT 'c' LOCATION 'd' TBLPROPERTIES ('e'='f')"""
+        )
 
     def test_lateral_view(self):
         self.validate_all(
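The added case uses validate_identity, which (assuming its usual round-trip semantics) asserts that parsing the statement as Hive and regenerating Hive SQL reproduces the input verbatim. A rough equivalent through the public API:

import sqlglot

# Round trip: Hive DDL with SERDE / INPUTFORMAT / OUTPUTFORMAT / TBLPROPERTIES
# should come back unchanged when re-generated for Hive.
sql = (
    "CREATE EXTERNAL TABLE `my_table` (`a7` ARRAY<DATE>) ROW FORMAT SERDE 'a' "
    "STORED AS INPUTFORMAT 'b' OUTPUTFORMAT 'c' LOCATION 'd' TBLPROPERTIES ('e'='f')"
)
assert sqlglot.transpile(sql, read="hive", write="hive")[0] == sql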
@@ -318,6 +321,11 @@ class TestHive(Validator):
                 "": "TS_OR_DS_ADD('2020-01-01', 1 * -1, 'DAY')",
             },
         )
+        self.validate_all("DATE_ADD('2020-01-01', -1)", read={"": "DATE_SUB('2020-01-01', 1)"})
+        self.validate_all("DATE_ADD(a, b * -1)", read={"": "DATE_SUB(a, b)"})
+        self.validate_all(
+            "ADD_MONTHS('2020-01-01', -2)", read={"": "DATE_SUB('2020-01-01', 2, month)"}
+        )
         self.validate_all(
             "DATEDIFF(TO_DATE(y), x)",
             write={
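The new read-direction assertions say that a generic DATE_SUB is generated for Hive as DATE_ADD with a negated amount, and a month-based DATE_SUB becomes ADD_MONTHS with a negative count. A small sketch of the first case, with the expected output taken from the test:

import sqlglot

# Generic SQL parsed with the default dialect, then generated for Hive.
print(sqlglot.transpile("DATE_SUB('2020-01-01', 1)", write="hive")[0])
# -> DATE_ADD('2020-01-01', -1)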
@@ -504,11 +512,10 @@ class TestHive(Validator):
             },
         )
         self.validate_all(
-            "SELECT * FROM x TABLESAMPLE(10) y",
+            "SELECT * FROM x TABLESAMPLE(10 PERCENT) y",
             write={
-                "presto": "SELECT * FROM x AS y TABLESAMPLE (10)",
-                "hive": "SELECT * FROM x TABLESAMPLE (10) AS y",
-                "spark": "SELECT * FROM x TABLESAMPLE (10) AS y",
+                "hive": "SELECT * FROM x TABLESAMPLE (10 PERCENT) AS y",
+                "spark": "SELECT * FROM x TABLESAMPLE (10 PERCENT) AS y",
             },
         )
         self.validate_all(
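This hunk moves the sampling test to Hive's percentage form: TABLESAMPLE(10 PERCENT) keeps the PERCENT keyword when generated for Hive and Spark, and the Presto expectation is dropped from the write dict. Roughly, through the public API, with the expected output taken from the test:

import sqlglot

# Hive percentage sampling regenerated for Spark keeps the PERCENT keyword.
sql = "SELECT * FROM x TABLESAMPLE(10 PERCENT) y"
print(sqlglot.transpile(sql, read="hive", write="spark")[0])
# -> SELECT * FROM x TABLESAMPLE (10 PERCENT) AS y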
@@ -650,25 +657,13 @@ class TestHive(Validator):
             },
         )
         self.validate_all(
-            "SELECT * FROM x TABLESAMPLE (1) AS foo",
+            "SELECT * FROM x TABLESAMPLE (1 PERCENT) AS foo",
             read={
-                "presto": "SELECT * FROM x AS foo TABLESAMPLE (1)",
+                "presto": "SELECT * FROM x AS foo TABLESAMPLE BERNOULLI (1)",
             },
             write={
-                "presto": "SELECT * FROM x AS foo TABLESAMPLE (1)",
-                "hive": "SELECT * FROM x TABLESAMPLE (1) AS foo",
-                "spark": "SELECT * FROM x TABLESAMPLE (1) AS foo",
-            },
-        )
-        self.validate_all(
-            "SELECT * FROM x TABLESAMPLE (1) AS foo",
-            read={
-                "presto": "SELECT * FROM x AS foo TABLESAMPLE (1)",
-            },
-            write={
-                "presto": "SELECT * FROM x AS foo TABLESAMPLE (1)",
-                "hive": "SELECT * FROM x TABLESAMPLE (1) AS foo",
-                "spark": "SELECT * FROM x TABLESAMPLE (1) AS foo",
+                "hive": "SELECT * FROM x TABLESAMPLE (1 PERCENT) AS foo",
+                "spark": "SELECT * FROM x TABLESAMPLE (1 PERCENT) AS foo",
             },
         )
         self.validate_all(
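The last hunk folds two near-duplicate sampling tests into one and maps Presto's Bernoulli sampling onto Hive's percentage form: reading TABLESAMPLE BERNOULLI (1) from Presto should produce TABLESAMPLE (1 PERCENT) for Hive and Spark. A minimal sketch, with the expected output taken from the test:

import sqlglot

# Presto Bernoulli sampling read in and regenerated as Hive percentage sampling.
sql = "SELECT * FROM x AS foo TABLESAMPLE BERNOULLI (1)"
print(sqlglot.transpile(sql, read="presto", write="hive")[0])
# -> SELECT * FROM x TABLESAMPLE (1 PERCENT) AS foo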