Adding upstream version 25.20.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent b35dbeb6b6
commit 0b78a18345
119 changed files with 78094 additions and 71498 deletions
@@ -108,7 +108,6 @@ LANGUAGE js AS
         self.validate_identity("SELECT * FROM READ_CSV('bla.csv')")
         self.validate_identity("CAST(x AS STRUCT<list ARRAY<INT64>>)")
         self.validate_identity("assert.true(1 = 1)")
-        self.validate_identity("SELECT ARRAY_TO_STRING(list, '--') AS text")
         self.validate_identity("SELECT jsondoc['some_key']")
         self.validate_identity("SELECT `p.d.UdF`(data).* FROM `p.d.t`")
         self.validate_identity("SELECT * FROM `my-project.my-dataset.my-table`")
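A note for readers outside the project: this file appears to be sqlglot's BigQuery dialect test suite (the validate_identity/validate_all helpers and the 25.x version line match that package; treat the naming as an assumption). validate_identity asserts that parsing a statement and regenerating it for the same dialect reproduces the input byte for byte. A minimal standalone sketch of that check:

    import sqlglot

    # Round-trip a statement through the BigQuery dialect; validate_identity
    # in the test suite asserts exactly this kind of equality.
    sql = "SELECT * FROM READ_CSV('bla.csv')"
    assert sqlglot.transpile(sql, read="bigquery", write="bigquery")[0] == sql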
@@ -631,9 +630,9 @@ LANGUAGE js AS
         self.validate_all(
             "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
             write={
-                "bigquery": "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
-                "databricks": "SELECT TIMESTAMPADD(MILLISECOND, 1, '2023-01-01T00:00:00')",
-                "duckdb": "SELECT CAST('2023-01-01T00:00:00' AS DATETIME) + INTERVAL 1 MILLISECOND",
+                "bigquery": "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL '1' MILLISECOND)",
+                "databricks": "SELECT TIMESTAMPADD(MILLISECOND, '1', '2023-01-01T00:00:00')",
+                "duckdb": "SELECT CAST('2023-01-01T00:00:00' AS DATETIME) + INTERVAL '1' MILLISECOND",
             },
         )
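validate_all, in contrast, renders one input statement for every dialect named in write. The behavior this hunk pins down, the interval quantity now being emitted in quotes, can be reproduced directly; a sketch, with the expected string taken from the duckdb entry above:

    import sqlglot

    result = sqlglot.transpile(
        "SELECT DATETIME_ADD('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
        read="bigquery",
        write="duckdb",
    )[0]
    # Per the updated expectation, the interval quantity is now quoted:
    # SELECT CAST('2023-01-01T00:00:00' AS DATETIME) + INTERVAL '1' MILLISECOND
    print(result)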
@@ -641,9 +640,9 @@ LANGUAGE js AS
         self.validate_all(
             "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
             write={
-                "bigquery": "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
-                "databricks": "SELECT TIMESTAMPADD(MILLISECOND, 1 * -1, '2023-01-01T00:00:00')",
-                "duckdb": "SELECT CAST('2023-01-01T00:00:00' AS DATETIME) - INTERVAL 1 MILLISECOND",
+                "bigquery": "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL '1' MILLISECOND)",
+                "databricks": "SELECT TIMESTAMPADD(MILLISECOND, '1' * -1, '2023-01-01T00:00:00')",
+                "duckdb": "SELECT CAST('2023-01-01T00:00:00' AS DATETIME) - INTERVAL '1' MILLISECOND",
             },
         )
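The DATETIME_SUB counterpart also shows how subtraction is lowered on dialects that only expose an additive primitive: the Databricks output negates the amount inside TIMESTAMPADD. A sketch of the same conversion:

    import sqlglot

    result = sqlglot.transpile(
        "SELECT DATETIME_SUB('2023-01-01T00:00:00', INTERVAL 1 MILLISECOND)",
        read="bigquery",
        write="databricks",
    )[0]
    # Expected per the updated test:
    # SELECT TIMESTAMPADD(MILLISECOND, '1' * -1, '2023-01-01T00:00:00')
    print(result)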
@@ -661,17 +660,24 @@ LANGUAGE js AS
         self.validate_all(
             'SELECT TIMESTAMP_ADD(TIMESTAMP "2008-12-25 15:30:00+00", INTERVAL 10 MINUTE)',
             write={
-                "bigquery": "SELECT TIMESTAMP_ADD(CAST('2008-12-25 15:30:00+00' AS TIMESTAMP), INTERVAL 10 MINUTE)",
-                "databricks": "SELECT DATE_ADD(MINUTE, 10, CAST('2008-12-25 15:30:00+00' AS TIMESTAMP))",
-                "mysql": "SELECT DATE_ADD(TIMESTAMP('2008-12-25 15:30:00+00'), INTERVAL 10 MINUTE)",
-                "spark": "SELECT DATE_ADD(MINUTE, 10, CAST('2008-12-25 15:30:00+00' AS TIMESTAMP))",
+                "bigquery": "SELECT TIMESTAMP_ADD(CAST('2008-12-25 15:30:00+00' AS TIMESTAMP), INTERVAL '10' MINUTE)",
+                "databricks": "SELECT DATE_ADD(MINUTE, '10', CAST('2008-12-25 15:30:00+00' AS TIMESTAMP))",
+                "mysql": "SELECT DATE_ADD(TIMESTAMP('2008-12-25 15:30:00+00'), INTERVAL '10' MINUTE)",
+                "spark": "SELECT DATE_ADD(MINUTE, '10', CAST('2008-12-25 15:30:00+00' AS TIMESTAMP))",
             },
         )
         self.validate_all(
             'SELECT TIMESTAMP_SUB(TIMESTAMP "2008-12-25 15:30:00+00", INTERVAL 10 MINUTE)',
             write={
-                "bigquery": "SELECT TIMESTAMP_SUB(CAST('2008-12-25 15:30:00+00' AS TIMESTAMP), INTERVAL 10 MINUTE)",
-                "mysql": "SELECT DATE_SUB(TIMESTAMP('2008-12-25 15:30:00+00'), INTERVAL 10 MINUTE)",
+                "bigquery": "SELECT TIMESTAMP_SUB(CAST('2008-12-25 15:30:00+00' AS TIMESTAMP), INTERVAL '10' MINUTE)",
+                "mysql": "SELECT DATE_SUB(TIMESTAMP('2008-12-25 15:30:00+00'), INTERVAL '10' MINUTE)",
             },
         )
+        self.validate_all(
+            "SELECT TIME_ADD(CAST('09:05:03' AS TIME), INTERVAL 2 HOUR)",
+            write={
+                "bigquery": "SELECT TIME_ADD(CAST('09:05:03' AS TIME), INTERVAL '2' HOUR)",
+                "duckdb": "SELECT CAST('09:05:03' AS TIME) + INTERVAL '2' HOUR",
+            },
+        )
         self.validate_all(
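Besides the quoting updates, this hunk adds first coverage for TIME_ADD. DuckDB has no such function, so the generator falls back to plain interval arithmetic; a sketch reproducing the new expectation:

    import sqlglot

    result = sqlglot.transpile(
        "SELECT TIME_ADD(CAST('09:05:03' AS TIME), INTERVAL 2 HOUR)",
        read="bigquery",
        write="duckdb",
    )[0]
    # Expected per the new test:
    # SELECT CAST('09:05:03' AS TIME) + INTERVAL '2' HOUR
    print(result)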
@@ -1237,19 +1243,19 @@ LANGUAGE js AS
             "DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)",
             write={
                 "postgres": "CURRENT_DATE - INTERVAL '1 DAY'",
-                "bigquery": "DATE_SUB(CURRENT_DATE, INTERVAL 1 DAY)",
+                "bigquery": "DATE_SUB(CURRENT_DATE, INTERVAL '1' DAY)",
             },
         )
         self.validate_all(
-            "DATE_ADD(CURRENT_DATE(), INTERVAL 1 DAY)",
+            "DATE_ADD(CURRENT_DATE(), INTERVAL -1 DAY)",
             write={
-                "bigquery": "DATE_ADD(CURRENT_DATE, INTERVAL 1 DAY)",
-                "duckdb": "CURRENT_DATE + INTERVAL 1 DAY",
-                "mysql": "DATE_ADD(CURRENT_DATE, INTERVAL 1 DAY)",
-                "postgres": "CURRENT_DATE + INTERVAL '1 DAY'",
-                "presto": "DATE_ADD('DAY', 1, CURRENT_DATE)",
-                "hive": "DATE_ADD(CURRENT_DATE, 1)",
-                "spark": "DATE_ADD(CURRENT_DATE, 1)",
+                "bigquery": "DATE_ADD(CURRENT_DATE, INTERVAL '-1' DAY)",
+                "duckdb": "CURRENT_DATE + INTERVAL '-1' DAY",
+                "mysql": "DATE_ADD(CURRENT_DATE, INTERVAL '-1' DAY)",
+                "postgres": "CURRENT_DATE + INTERVAL '-1 DAY'",
+                "presto": "DATE_ADD('DAY', CAST('-1' AS BIGINT), CURRENT_DATE)",
+                "hive": "DATE_ADD(CURRENT_DATE, '-1')",
+                "spark": "DATE_ADD(CURRENT_DATE, '-1')",
             },
         )
         self.validate_all(
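Here the test input itself changes from INTERVAL 1 DAY to INTERVAL -1 DAY, so the expectations now exercise negative quantities: quoted as '-1' on BigQuery and MySQL, cast to BIGINT on Presto, and passed as a bare day count on Hive and Spark. A sketch of the Spark case:

    import sqlglot

    result = sqlglot.transpile(
        "DATE_ADD(CURRENT_DATE(), INTERVAL -1 DAY)",
        read="bigquery",
        write="spark",
    )[0]
    # Expected per the updated test: Spark's two-argument DATE_ADD takes a
    # bare day count, so the negative quantity survives as the string '-1':
    # DATE_ADD(CURRENT_DATE, '-1')
    print(result)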
@@ -1478,6 +1484,20 @@ WHERE
                 "duckdb": "SELECT CAST(STRPTIME('Thursday Dec 25 2008', '%A %b %-d %Y') AS DATE)",
             },
         )
+        self.validate_all(
+            "SELECT ARRAY_TO_STRING(['cake', 'pie', NULL], '--') AS text",
+            write={
+                "bigquery": "SELECT ARRAY_TO_STRING(['cake', 'pie', NULL], '--') AS text",
+                "duckdb": "SELECT ARRAY_TO_STRING(['cake', 'pie', NULL], '--') AS text",
+            },
+        )
+        self.validate_all(
+            "SELECT ARRAY_TO_STRING(['cake', 'pie', NULL], '--', 'MISSING') AS text",
+            write={
+                "bigquery": "SELECT ARRAY_TO_STRING(['cake', 'pie', NULL], '--', 'MISSING') AS text",
+                "duckdb": "SELECT ARRAY_TO_STRING(LIST_TRANSFORM(['cake', 'pie', NULL], x -> COALESCE(x, 'MISSING')), '--') AS text",
+            },
+        )

     def test_errors(self):
         with self.assertRaises(TokenError):
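These two added tests replace the ARRAY_TO_STRING identity check removed in the first hunk. The two-argument form is shared between BigQuery and DuckDB, but the three-argument form (a placeholder for NULL elements) is BigQuery-only, so the DuckDB output rewrites NULLs with LIST_TRANSFORM and COALESCE; a sketch:

    import sqlglot

    result = sqlglot.transpile(
        "SELECT ARRAY_TO_STRING(['cake', 'pie', NULL], '--', 'MISSING') AS text",
        read="bigquery",
        write="duckdb",
    )[0]
    # Expected per the new test: DuckDB's ARRAY_TO_STRING has no NULL
    # placeholder argument, so NULLs are replaced element-wise first:
    # SELECT ARRAY_TO_STRING(LIST_TRANSFORM(['cake', 'pie', NULL], x -> COALESCE(x, 'MISSING')), '--') AS text
    print(result)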