1
0
Fork 0

Adding upstream version 20.4.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 21:18:16 +01:00
parent fd9de5e4cb
commit 943dfc0887
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
20 changed files with 562 additions and 52 deletions

View file

@@ -206,6 +206,7 @@ class TestBigQuery(Validator):
"duckdb": "SELECT * FROM t WHERE EXISTS(SELECT * FROM UNNEST(nums) AS _t(x) WHERE x > 1)",
},
)
self.validate_identity("UPDATE x SET y = NULL")
self.validate_all(
"NULL",
read={

View file

@@ -47,6 +47,7 @@ class TestClickhouse(Validator):
self.validate_identity("SELECT INTERVAL t.days day")
self.validate_identity("SELECT match('abc', '([a-z]+)')")
self.validate_identity("dictGet(x, 'y')")
self.validate_identity("SELECT * FROM final")
self.validate_identity("SELECT * FROM x FINAL")
self.validate_identity("SELECT * FROM x AS y FINAL")
self.validate_identity("'a' IN mapKeys(map('a', 1, 'b', 2))")

View file

@@ -2056,3 +2056,40 @@ SELECT
self.assertEqual(expression.sql(dialect="mysql"), expected_sql)
self.assertEqual(expression.sql(dialect="tsql"), expected_sql)
def test_random(self):
    """Round-trip the nullary random-number function across dialects.

    Every dialect both reads and writes its own spelling of RAND(), so a
    single mapping serves for the ``read`` and ``write`` directions alike.
    """
    dialect_sql = {
        "bigquery": "RAND()",
        "clickhouse": "randCanonical()",
        "databricks": "RAND()",
        "doris": "RAND()",
        "drill": "RAND()",
        "duckdb": "RANDOM()",
        "hive": "RAND()",
        "mysql": "RAND()",
        "oracle": "RAND()",
        "postgres": "RANDOM()",
        "presto": "RAND()",
        "spark": "RAND()",
        "sqlite": "RANDOM()",
        "tsql": "RAND()",
    }
    # Pass independent copies so neither direction can observe mutation
    # of the other's mapping.
    self.validate_all(
        "RAND()",
        write=dict(dialect_sql),
        read=dict(dialect_sql),
    )

View file

@@ -546,13 +546,21 @@ class TestPresto(Validator):
def test_unicode_string(self):
for prefix in ("u&", "U&"):
self.validate_identity(
self.validate_all(
f"{prefix}'Hello winter \\2603 !'",
"U&'Hello winter \\2603 !'",
write={
"presto": "U&'Hello winter \\2603 !'",
"snowflake": "'Hello winter \\u2603 !'",
"spark": "'Hello winter \\u2603 !'",
},
)
self.validate_identity(
self.validate_all(
f"{prefix}'Hello winter #2603 !' UESCAPE '#'",
"U&'Hello winter #2603 !' UESCAPE '#'",
write={
"presto": "U&'Hello winter #2603 !' UESCAPE '#'",
"snowflake": "'Hello winter \\u2603 !'",
"spark": "'Hello winter \\u2603 !'",
},
)
def test_presto(self):

View file

@@ -696,6 +696,18 @@ x <> 1;
NOT 1 <> x;
x = 1;
x > CAST('2024-01-01' AS DATE) OR x > CAST('2023-12-31' AS DATE);
x > CAST('2023-12-31' AS DATE);
CAST(x AS DATE) > CAST('2024-01-01' AS DATE) OR CAST(x AS DATE) > CAST('2023-12-31' AS DATE);
CAST(x AS DATE) > CAST('2023-12-31' AS DATE);
FUN() > 0 OR FUN() > 1;
FUN() > 0;
RAND() > 0 OR RAND() > 1;
RAND() > 0 OR RAND() > 1;
--------------------------------------
-- COALESCE
--------------------------------------
@@ -835,7 +847,7 @@ DATE_TRUNC('quarter', x) = CAST('2021-01-02' AS DATE);
DATE_TRUNC('quarter', x) = CAST('2021-01-02' AS DATE);
DATE_TRUNC('year', x) <> CAST('2021-01-01' AS DATE);
x < CAST('2021-01-01' AS DATE) AND x >= CAST('2022-01-01' AS DATE);
FALSE;
-- Always true, except for nulls
DATE_TRUNC('year', x) <> CAST('2021-01-02' AS DATE);