1
0
Fork 0

Adding upstream version 15.0.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 15:56:32 +01:00
parent 70d5d3451a
commit bb75596aa9
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
167 changed files with 58268 additions and 51337 deletions

View file

@@ -4,6 +4,17 @@ from tests.dialects.test_dialect import Validator
class TestHive(Validator):
dialect = "hive"
def test_hive(self):
self.validate_identity("SELECT * FROM test DISTRIBUTE BY y SORT BY x DESC ORDER BY l")
self.validate_identity(
"SELECT * FROM test WHERE RAND() <= 0.1 DISTRIBUTE BY RAND() SORT BY RAND()"
)
self.validate_identity("(SELECT 1 UNION SELECT 2) DISTRIBUTE BY z")
self.validate_identity("(SELECT 1 UNION SELECT 2) DISTRIBUTE BY z SORT BY x")
self.validate_identity("(SELECT 1 UNION SELECT 2) CLUSTER BY y DESC")
self.validate_identity("SELECT * FROM test CLUSTER BY y")
self.validate_identity("(SELECT 1 UNION SELECT 2) SORT BY z")
def test_bits(self):
self.validate_all(
"x & 1",
@@ -362,7 +373,7 @@ class TestHive(Validator):
self.validate_all(
"SELECT fname, lname, age FROM person ORDER BY age DESC NULLS FIRST, fname ASC NULLS LAST, lname",
write={
"duckdb": "SELECT fname, lname, age FROM person ORDER BY age DESC NULLS FIRST, fname NULLS LAST, lname",
"duckdb": "SELECT fname, lname, age FROM person ORDER BY age DESC NULLS FIRST, fname, lname NULLS FIRST",
"presto": "SELECT fname, lname, age FROM person ORDER BY age DESC NULLS FIRST, fname, lname NULLS FIRST",
"hive": "SELECT fname, lname, age FROM person ORDER BY age DESC NULLS FIRST, fname NULLS LAST, lname",
"spark": "SELECT fname, lname, age FROM person ORDER BY age DESC NULLS FIRST, fname NULLS LAST, lname",
@@ -512,10 +523,10 @@ class TestHive(Validator):
},
)
self.validate_all(
"SELECT * FROM x TABLESAMPLE(10 PERCENT) y",
"SELECT * FROM x.z TABLESAMPLE(10 PERCENT) y",
write={
"hive": "SELECT * FROM x TABLESAMPLE (10 PERCENT) AS y",
"spark": "SELECT * FROM x TABLESAMPLE (10 PERCENT) AS y",
"hive": "SELECT * FROM x.z TABLESAMPLE (10 PERCENT) AS y",
"spark": "SELECT * FROM x.z TABLESAMPLE (10 PERCENT) AS y",
},
)
self.validate_all(
@@ -548,6 +559,12 @@ class TestHive(Validator):
"spark": "GET_JSON_OBJECT(x, '$.name')",
},
)
self.validate_all(
"STRUCT(a = b, c = d)",
read={
"snowflake": "OBJECT_CONSTRUCT(a, b, c, d)",
},
)
self.validate_all(
"MAP(a, b, c, d)",
read={
@@ -557,7 +574,6 @@ class TestHive(Validator):
"hive": "MAP(a, b, c, d)",
"presto": "MAP(ARRAY[a, c], ARRAY[b, d])",
"spark": "MAP(a, b, c, d)",
"snowflake": "OBJECT_CONSTRUCT(a, b, c, d)",
},
write={
"": "MAP(ARRAY(a, c), ARRAY(b, d))",
@@ -627,7 +643,7 @@ class TestHive(Validator):
self.validate_all(
"x div y",
write={
"duckdb": "CAST(x / y AS INT)",
"duckdb": "x // y",
"presto": "CAST(x / y AS INTEGER)",
"hive": "CAST(x / y AS INT)",
"spark": "CAST(x / y AS INT)",