
Adding upstream version 21.0.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 21:20:19 +01:00
parent 07f4660f31
commit 91f2cef5f0
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
115 changed files with 66603 additions and 60920 deletions
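
The excerpt below covers sqlglot's BigQuery dialect tests. Each validate_all entry asserts a parse/generate round-trip between dialects; as a rough sketch, one read/write pair corresponds to a call to the public sqlglot.transpile API (which the test helpers wrap), with the expected output taken from the test itself:

import sqlglot

# Parse BigQuery syntax and re-emit it as Snowflake SQL. transpile()
# returns one generated string per input statement.
out = sqlglot.transpile(
    "SELECT SUM(x IGNORE NULLS) AS x", read="bigquery", write="snowflake"
)
assert out == ["SELECT SUM(x) IGNORE NULLS AS x"]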

tests/dialects/test_bigquery.py

@@ -18,6 +18,64 @@ class TestBigQuery(Validator):
    maxDiff = None

    def test_bigquery(self):
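        # BigQuery and DuckDB accept IGNORE NULLS / RESPECT NULLS inside the
        # aggregate call; Postgres, Spark and Snowflake place it after the
        # closing parenthesis, as the read/write pairs below assert.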
        self.validate_all(
            "SELECT SUM(x IGNORE NULLS) AS x",
            read={
                "bigquery": "SELECT SUM(x IGNORE NULLS) AS x",
                "duckdb": "SELECT SUM(x IGNORE NULLS) AS x",
                "postgres": "SELECT SUM(x) IGNORE NULLS AS x",
                "spark": "SELECT SUM(x) IGNORE NULLS AS x",
                "snowflake": "SELECT SUM(x) IGNORE NULLS AS x",
            },
            write={
                "bigquery": "SELECT SUM(x IGNORE NULLS) AS x",
                "duckdb": "SELECT SUM(x IGNORE NULLS) AS x",
                "postgres": "SELECT SUM(x) IGNORE NULLS AS x",
                "spark": "SELECT SUM(x) IGNORE NULLS AS x",
                "snowflake": "SELECT SUM(x) IGNORE NULLS AS x",
            },
        )
        self.validate_all(
            "SELECT SUM(x RESPECT NULLS) AS x",
            read={
                "bigquery": "SELECT SUM(x RESPECT NULLS) AS x",
                "duckdb": "SELECT SUM(x RESPECT NULLS) AS x",
                "postgres": "SELECT SUM(x) RESPECT NULLS AS x",
                "spark": "SELECT SUM(x) RESPECT NULLS AS x",
                "snowflake": "SELECT SUM(x) RESPECT NULLS AS x",
            },
            write={
                "bigquery": "SELECT SUM(x RESPECT NULLS) AS x",
                "duckdb": "SELECT SUM(x RESPECT NULLS) AS x",
                "postgres": "SELECT SUM(x) RESPECT NULLS AS x",
                "spark": "SELECT SUM(x) RESPECT NULLS AS x",
                "snowflake": "SELECT SUM(x) RESPECT NULLS AS x",
            },
        )
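        # The same placement rule holds under a window: DuckDB keeps RESPECT
        # NULLS inside QUANTILE_CONT, while Spark emits it between the call
        # and the OVER clause.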
        self.validate_all(
            "SELECT PERCENTILE_CONT(x, 0.5 RESPECT NULLS) OVER ()",
            write={
                "duckdb": "SELECT QUANTILE_CONT(x, 0.5 RESPECT NULLS) OVER ()",
                "spark": "SELECT PERCENTILE_CONT(x, 0.5) RESPECT NULLS OVER ()",
            },
        )
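        # DISTINCT, ORDER BY and LIMIT survive inside the aggregate; DuckDB
        # additionally spells out BigQuery's default NULLS FIRST ordering,
        # while Spark renames to COLLECT_LIST and moves IGNORE NULLS outside.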
        self.validate_all(
            "SELECT ARRAY_AGG(DISTINCT x IGNORE NULLS ORDER BY a, b DESC LIMIT 10) AS x",
            write={
                "duckdb": "SELECT ARRAY_AGG(DISTINCT x ORDER BY a NULLS FIRST, b DESC LIMIT 10 IGNORE NULLS) AS x",
                "spark": "SELECT COLLECT_LIST(DISTINCT x ORDER BY a, b DESC LIMIT 10) IGNORE NULLS AS x",
            },
        )
        self.validate_all(
            "SELECT ARRAY_AGG(DISTINCT x IGNORE NULLS ORDER BY a, b DESC LIMIT 1, 10) AS x",
            write={
                "duckdb": "SELECT ARRAY_AGG(DISTINCT x ORDER BY a NULLS FIRST, b DESC LIMIT 1, 10 IGNORE NULLS) AS x",
                "spark": "SELECT COLLECT_LIST(DISTINCT x ORDER BY a, b DESC LIMIT 1, 10) IGNORE NULLS AS x",
            },
        )
self.validate_identity("SELECT COUNT(x RESPECT NULLS)")
self.validate_identity("SELECT LAST_VALUE(x IGNORE NULLS) OVER y AS x")
self.validate_identity(
"create or replace view test (tenant_id OPTIONS(description='Test description on table creation')) select 1 as tenant_id, 1 as customer_id;",
"CREATE OR REPLACE VIEW test (tenant_id OPTIONS (description='Test description on table creation')) AS SELECT 1 AS tenant_id, 1 AS customer_id",
@@ -358,10 +416,25 @@ class TestBigQuery(Validator):
"SELECT TIMESTAMP_DIFF(TIMESTAMP_SECONDS(60), TIMESTAMP_SECONDS(0), minute)",
write={
"bigquery": "SELECT TIMESTAMP_DIFF(TIMESTAMP_SECONDS(60), TIMESTAMP_SECONDS(0), MINUTE)",
"databricks": "SELECT TIMESTAMPDIFF(MINUTE, CAST(FROM_UNIXTIME(0) AS TIMESTAMP), CAST(FROM_UNIXTIME(60) AS TIMESTAMP))",
"duckdb": "SELECT DATE_DIFF('MINUTE', TO_TIMESTAMP(0), TO_TIMESTAMP(60))",
"snowflake": "SELECT TIMESTAMPDIFF(MINUTE, TO_TIMESTAMP(0), TO_TIMESTAMP(60))",
},
)
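        # BigQuery's TIMESTAMP_DIFF(a, b, unit) computes a - b, whereas
        # TIMESTAMPDIFF(unit, start, end) counts from start to end, so the
        # transpiled forms swap the operands.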
        self.validate_all(
            "TIMESTAMP_DIFF(a, b, MONTH)",
            read={
                "bigquery": "TIMESTAMP_DIFF(a, b, month)",
                "databricks": "TIMESTAMPDIFF(month, b, a)",
                "mysql": "TIMESTAMPDIFF(month, b, a)",
            },
            write={
                "databricks": "TIMESTAMPDIFF(MONTH, b, a)",
                "mysql": "TIMESTAMPDIFF(MONTH, b, a)",
                "snowflake": "TIMESTAMPDIFF(MONTH, b, a)",
            },
        )
        self.validate_all(
            "SELECT TIMESTAMP_MICROS(x)",
            read={
@@ -419,34 +492,42 @@ class TestBigQuery(Validator):
"snowflake": "CREATE OR REPLACE TABLE a.b.c CLONE a.b.d",
},
)
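        # BigQuery DATETIME_* maps onto Databricks as TIMESTAMPDIFF /
        # TIMESTAMPADD / DATE_TRUNC; DATETIME_SUB becomes TIMESTAMPADD with
        # the interval negated (1 * -1).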
        self.validate_all(
            "SELECT DATETIME_DIFF('2023-01-01T00:00:00', '2023-01-01T05:00:00', MILLISECOND)",
            write={
                "bigquery": "SELECT DATETIME_DIFF('2023-01-01T00:00:00', '2023-01-01T05:00:00', MILLISECOND)",
                "databricks": "SELECT TIMESTAMPDIFF(MILLISECOND, '2023-01-01T05:00:00', '2023-01-01T00:00:00')",
            },
        ),
        self.validate_all(
            "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
            write={
                "bigquery": "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
                "databricks": "SELECT TIMESTAMPADD(MILLISECOND, 1, '2023-01-01T00:00:00')",
            },
        ),
        self.validate_all(
            "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
            write={
                "bigquery": "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
                "databricks": "SELECT TIMESTAMPADD(MILLISECOND, 1 * -1, '2023-01-01T00:00:00')",
            },
        ),
        self.validate_all(
            "SELECT DATETIME_TRUNC('2023-01-01T01:01:01', HOUR)",
            write={
                "bigquery": "SELECT DATETIME_TRUNC('2023-01-01T01:01:01', HOUR)",
                "databricks": "SELECT DATE_TRUNC('HOUR', '2023-01-01T01:01:01')",
            },
        ),
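        # In the 21.0.1 side of this hunk, the four calls above (each ending
        # in a stray "," that turns the statement into a one-element tuple)
        # are replaced by the parenthesis-wrapped equivalents below.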
        (
            self.validate_all(
                "SELECT DATETIME_DIFF('2023-01-01T00:00:00', '2023-01-01T05:00:00', MILLISECOND)",
                write={
                    "bigquery": "SELECT DATETIME_DIFF('2023-01-01T00:00:00', '2023-01-01T05:00:00', MILLISECOND)",
                    "databricks": "SELECT TIMESTAMPDIFF(MILLISECOND, '2023-01-01T05:00:00', '2023-01-01T00:00:00')",
                },
            ),
        )
        (
            self.validate_all(
                "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
                write={
                    "bigquery": "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
                    "databricks": "SELECT TIMESTAMPADD(MILLISECOND, 1, '2023-01-01T00:00:00')",
                },
            ),
        )
        (
            self.validate_all(
                "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
                write={
                    "bigquery": "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
                    "databricks": "SELECT TIMESTAMPADD(MILLISECOND, 1 * -1, '2023-01-01T00:00:00')",
                },
            ),
        )
        (
            self.validate_all(
                "SELECT DATETIME_TRUNC('2023-01-01T01:01:01', HOUR)",
                write={
                    "bigquery": "SELECT DATETIME_TRUNC('2023-01-01T01:01:01', HOUR)",
                    "databricks": "SELECT DATE_TRUNC('HOUR', '2023-01-01T01:01:01')",
                },
            ),
        )
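        # Simple scalar mappings: SQLite's two-argument MIN reads as
        # BigQuery's LEAST, and CHAR / NCHAR both normalize to STRING.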
self.validate_all("LEAST(x, y)", read={"sqlite": "MIN(x, y)"})
self.validate_all("CAST(x AS CHAR)", write={"bigquery": "CAST(x AS STRING)"})
self.validate_all("CAST(x AS NCHAR)", write={"bigquery": "CAST(x AS STRING)"})