1
0
Fork 0

Merging upstream version 23.16.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 21:35:32 +01:00
parent d0f42f708a
commit 213191b8e3
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
93 changed files with 64106 additions and 59061 deletions

View file

@ -20,6 +20,13 @@ class TestBigQuery(Validator):
maxDiff = None
def test_bigquery(self):
self.validate_identity(
"""CREATE TEMPORARY FUNCTION FOO()
RETURNS STRING
LANGUAGE js AS
'return "Hello world!"'""",
pretty=True,
)
self.validate_identity(
"[a, a(1, 2,3,4444444444444444, tttttaoeunthaoentuhaoentuheoantu, toheuntaoheutnahoeunteoahuntaoeh), b(3, 4,5), c, d, tttttttttttttttteeeeeeeeeeeeeett, 12312312312]",
"""[
@ -278,6 +285,13 @@ class TestBigQuery(Validator):
r"REGEXP_EXTRACT(svc_plugin_output, '\\\\\\((.*)')",
)
self.validate_all(
"SELECT t.c1, h.c2, s.c3 FROM t1 AS t, UNNEST(t.t2) AS h, UNNEST(h.t3) AS s",
write={
"bigquery": "SELECT t.c1, h.c2, s.c3 FROM t1 AS t, UNNEST(t.t2) AS h, UNNEST(h.t3) AS s",
"duckdb": "SELECT t.c1, h.c2, s.c3 FROM t1 AS t, UNNEST(t.t2) AS _t0(h), UNNEST(h.t3) AS _t1(s)",
},
)
self.validate_all(
"PARSE_TIMESTAMP('%Y-%m-%dT%H:%M:%E6S%z', x)",
write={
@ -289,7 +303,7 @@ class TestBigQuery(Validator):
"SELECT results FROM Coordinates, Coordinates.position AS results",
write={
"bigquery": "SELECT results FROM Coordinates, UNNEST(Coordinates.position) AS results",
"presto": "SELECT results FROM Coordinates, UNNEST(Coordinates.position) AS _t(results)",
"presto": "SELECT results FROM Coordinates, UNNEST(Coordinates.position) AS _t0(results)",
},
)
self.validate_all(
@ -307,7 +321,7 @@ class TestBigQuery(Validator):
},
write={
"bigquery": "SELECT results FROM Coordinates AS c, UNNEST(c.position) AS results",
"presto": "SELECT results FROM Coordinates AS c, UNNEST(c.position) AS _t(results)",
"presto": "SELECT results FROM Coordinates AS c, UNNEST(c.position) AS _t0(results)",
"redshift": "SELECT results FROM Coordinates AS c, c.position AS results",
},
)
@ -525,7 +539,7 @@ class TestBigQuery(Validator):
"SELECT * FROM t WHERE EXISTS(SELECT * FROM unnest(nums) AS x WHERE x > 1)",
write={
"bigquery": "SELECT * FROM t WHERE EXISTS(SELECT * FROM UNNEST(nums) AS x WHERE x > 1)",
"duckdb": "SELECT * FROM t WHERE EXISTS(SELECT * FROM UNNEST(nums) AS _t(x) WHERE x > 1)",
"duckdb": "SELECT * FROM t WHERE EXISTS(SELECT * FROM UNNEST(nums) AS _t0(x) WHERE x > 1)",
},
)
self.validate_all(
@ -617,13 +631,88 @@ class TestBigQuery(Validator):
"mysql": "SELECT DATE_SUB(TIMESTAMP('2008-12-25 15:30:00+00'), INTERVAL 10 MINUTE)",
},
)
self.validate_all(
"LOWER(TO_HEX(x))",
write={
"": "LOWER(HEX(x))",
"bigquery": "TO_HEX(x)",
"clickhouse": "LOWER(HEX(x))",
"duckdb": "LOWER(HEX(x))",
"hive": "LOWER(HEX(x))",
"mysql": "LOWER(HEX(x))",
"spark": "LOWER(HEX(x))",
"sqlite": "LOWER(HEX(x))",
"presto": "LOWER(TO_HEX(x))",
"trino": "LOWER(TO_HEX(x))",
},
)
self.validate_all(
"TO_HEX(x)",
read={
"": "LOWER(HEX(x))",
"clickhouse": "LOWER(HEX(x))",
"duckdb": "LOWER(HEX(x))",
"hive": "LOWER(HEX(x))",
"mysql": "LOWER(HEX(x))",
"spark": "LOWER(HEX(x))",
"sqlite": "LOWER(HEX(x))",
"presto": "LOWER(TO_HEX(x))",
"trino": "LOWER(TO_HEX(x))",
},
write={
"": "LOWER(HEX(x))",
"bigquery": "TO_HEX(x)",
"clickhouse": "LOWER(HEX(x))",
"duckdb": "LOWER(HEX(x))",
"hive": "LOWER(HEX(x))",
"mysql": "LOWER(HEX(x))",
"presto": "LOWER(TO_HEX(x))",
"spark": "LOWER(HEX(x))",
"sqlite": "LOWER(HEX(x))",
"trino": "LOWER(TO_HEX(x))",
},
)
self.validate_all(
"UPPER(TO_HEX(x))",
read={
"": "HEX(x)",
"clickhouse": "HEX(x)",
"duckdb": "HEX(x)",
"hive": "HEX(x)",
"mysql": "HEX(x)",
"presto": "TO_HEX(x)",
"spark": "HEX(x)",
"sqlite": "HEX(x)",
"trino": "TO_HEX(x)",
},
write={
"": "HEX(x)",
"bigquery": "UPPER(TO_HEX(x))",
"clickhouse": "HEX(x)",
"duckdb": "HEX(x)",
"hive": "HEX(x)",
"mysql": "HEX(x)",
"presto": "TO_HEX(x)",
"spark": "HEX(x)",
"sqlite": "HEX(x)",
"trino": "TO_HEX(x)",
},
)
self.validate_all(
"MD5(x)",
read={
"clickhouse": "MD5(x)",
"presto": "MD5(x)",
"trino": "MD5(x)",
},
write={
"": "MD5_DIGEST(x)",
"bigquery": "MD5(x)",
"clickhouse": "MD5(x)",
"hive": "UNHEX(MD5(x))",
"presto": "MD5(x)",
"spark": "UNHEX(MD5(x))",
"trino": "MD5(x)",
},
)
self.validate_all(
@ -631,25 +720,69 @@ class TestBigQuery(Validator):
read={
"duckdb": "SELECT MD5(some_string)",
"spark": "SELECT MD5(some_string)",
"clickhouse": "SELECT LOWER(HEX(MD5(some_string)))",
"presto": "SELECT LOWER(TO_HEX(MD5(some_string)))",
"trino": "SELECT LOWER(TO_HEX(MD5(some_string)))",
},
write={
"": "SELECT MD5(some_string)",
"bigquery": "SELECT TO_HEX(MD5(some_string))",
"duckdb": "SELECT MD5(some_string)",
"clickhouse": "SELECT LOWER(HEX(MD5(some_string)))",
"presto": "SELECT LOWER(TO_HEX(MD5(some_string)))",
"trino": "SELECT LOWER(TO_HEX(MD5(some_string)))",
},
)
self.validate_all(
"SHA1(x)",
read={
"clickhouse": "SHA1(x)",
"presto": "SHA1(x)",
"trino": "SHA1(x)",
},
write={
"clickhouse": "SHA1(x)",
"bigquery": "SHA1(x)",
"": "SHA(x)",
"presto": "SHA1(x)",
"trino": "SHA1(x)",
},
)
self.validate_all(
"SHA1(x)",
write={
"bigquery": "SHA1(x)",
"": "SHA(x)",
},
)
self.validate_all(
"SHA256(x)",
read={
"clickhouse": "SHA256(x)",
"presto": "SHA256(x)",
"trino": "SHA256(x)",
},
write={
"bigquery": "SHA256(x)",
"spark2": "SHA2(x, 256)",
"clickhouse": "SHA256(x)",
"presto": "SHA256(x)",
"trino": "SHA256(x)",
},
)
self.validate_all(
"SHA512(x)",
read={
"clickhouse": "SHA512(x)",
"presto": "SHA512(x)",
"trino": "SHA512(x)",
},
write={
"clickhouse": "SHA512(x)",
"bigquery": "SHA512(x)",
"spark2": "SHA2(x, 512)",
"presto": "SHA512(x)",
"trino": "SHA512(x)",
},
)
self.validate_all(
@ -860,8 +993,8 @@ class TestBigQuery(Validator):
},
write={
"bigquery": "SELECT * FROM UNNEST(['7', '14']) AS x",
"presto": "SELECT * FROM UNNEST(ARRAY['7', '14']) AS _t(x)",
"spark": "SELECT * FROM UNNEST(ARRAY('7', '14')) AS _t(x)",
"presto": "SELECT * FROM UNNEST(ARRAY['7', '14']) AS _t0(x)",
"spark": "SELECT * FROM UNNEST(ARRAY('7', '14')) AS _t0(x)",
},
)
self.validate_all(
@ -981,6 +1114,69 @@ class TestBigQuery(Validator):
"bigquery": "CREATE TABLE db.example_table (x INT64) PARTITION BY x CLUSTER BY x",
},
)
self.validate_all(
"DELETE db.example_table WHERE x = 1",
write={
"bigquery": "DELETE db.example_table WHERE x = 1",
"presto": "DELETE FROM db.example_table WHERE x = 1",
},
)
self.validate_all(
"DELETE db.example_table tb WHERE tb.x = 1",
write={
"bigquery": "DELETE db.example_table AS tb WHERE tb.x = 1",
"presto": "DELETE FROM db.example_table WHERE x = 1",
},
)
self.validate_all(
"DELETE db.example_table AS tb WHERE tb.x = 1",
write={
"bigquery": "DELETE db.example_table AS tb WHERE tb.x = 1",
"presto": "DELETE FROM db.example_table WHERE x = 1",
},
)
self.validate_all(
"DELETE FROM db.example_table WHERE x = 1",
write={
"bigquery": "DELETE FROM db.example_table WHERE x = 1",
"presto": "DELETE FROM db.example_table WHERE x = 1",
},
)
self.validate_all(
"DELETE FROM db.example_table tb WHERE tb.x = 1",
write={
"bigquery": "DELETE FROM db.example_table AS tb WHERE tb.x = 1",
"presto": "DELETE FROM db.example_table WHERE x = 1",
},
)
self.validate_all(
"DELETE FROM db.example_table AS tb WHERE tb.x = 1",
write={
"bigquery": "DELETE FROM db.example_table AS tb WHERE tb.x = 1",
"presto": "DELETE FROM db.example_table WHERE x = 1",
},
)
self.validate_all(
"DELETE FROM db.example_table AS tb WHERE example_table.x = 1",
write={
"bigquery": "DELETE FROM db.example_table AS tb WHERE example_table.x = 1",
"presto": "DELETE FROM db.example_table WHERE x = 1",
},
)
self.validate_all(
"DELETE FROM db.example_table WHERE example_table.x = 1",
write={
"bigquery": "DELETE FROM db.example_table WHERE example_table.x = 1",
"presto": "DELETE FROM db.example_table WHERE example_table.x = 1",
},
)
self.validate_all(
"DELETE FROM db.t1 AS t1 WHERE NOT t1.c IN (SELECT db.t2.c FROM db.t2)",
write={
"bigquery": "DELETE FROM db.t1 AS t1 WHERE NOT t1.c IN (SELECT db.t2.c FROM db.t2)",
"presto": "DELETE FROM db.t1 WHERE NOT c IN (SELECT c FROM db.t2)",
},
)
self.validate_all(
"SELECT * FROM a WHERE b IN UNNEST([1, 2, 3])",
write={
@ -1464,3 +1660,14 @@ OPTIONS (
with self.assertRaises(ParseError):
transpile("SELECT JSON_OBJECT('a', 1, 'b') AS json_data", read="bigquery")
def test_mod(self):
    """Verify that BigQuery MOD(...) expressions survive a transpile round trip."""
    # MOD should come back unchanged for a range of operand shapes:
    # plain identifiers, string literals, numeric literals, and arithmetic.
    roundtrip_cases = (
        "MOD(a, b)",
        "MOD('a', b)",
        "MOD(5, 2)",
        "MOD((a + 1) * 8, 5 - 1)",
    )
    for case in roundtrip_cases:
        with self.subTest(f"Testing BigQuery roundtrip of modulo operation: {case}"):
            self.validate_identity(case)

    # Parentheses around a subquery argument are part of the subquery and kept.
    self.validate_identity("SELECT MOD((SELECT 1), 2)")

    # Redundant parentheses around a plain expression argument are dropped.
    self.validate_identity(
        "MOD((a + 1), b)",
        "MOD(a + 1, b)",
    )