Adding upstream version 25.16.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>

parent a500eebbbb, commit 1341bc6477

110 changed files with 75353 additions and 68092 deletions
@@ -1,4 +1,6 @@
 from unittest import mock
+import datetime
+import pytz
 
 from sqlglot import (
     ErrorLevel,
@@ -103,6 +105,7 @@ LANGUAGE js AS
         select_with_quoted_udf = self.validate_identity("SELECT `p.d.UdF`(data) FROM `p.d.t`")
         self.assertEqual(select_with_quoted_udf.selects[0].name, "p.d.UdF")
 
+        self.validate_identity("SELECT * FROM READ_CSV('bla.csv')")
         self.validate_identity("CAST(x AS STRUCT<list ARRAY<INT64>>)")
         self.validate_identity("assert.true(1 = 1)")
         self.validate_identity("SELECT ARRAY_TO_STRING(list, '--') AS text")
@@ -446,7 +449,7 @@ LANGUAGE js AS
             write={
                 "bigquery": "SELECT LAST_DAY(CAST('2008-11-25' AS DATE), MONTH)",
                 "duckdb": "SELECT LAST_DAY(CAST('2008-11-25' AS DATE))",
-                "clickhouse": "SELECT LAST_DAY(CAST('2008-11-25' AS DATE))",
+                "clickhouse": "SELECT LAST_DAY(CAST('2008-11-25' AS Nullable(DATE)))",
                 "mysql": "SELECT LAST_DAY(CAST('2008-11-25' AS DATE))",
                 "oracle": "SELECT LAST_DAY(CAST('2008-11-25' AS DATE))",
                 "postgres": "SELECT CAST(DATE_TRUNC('MONTH', CAST('2008-11-25' AS DATE)) + INTERVAL '1 MONTH' - INTERVAL '1 DAY' AS DATE)",
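
The ClickHouse expectation above now wraps the date type in Nullable(...). A minimal sketch of exercising it through sqlglot's public transpile API; the input SQL is an assumption inferred from the BigQuery string in the hunk, since the test's source SQL sits outside the context shown:

    import sqlglot

    # Input inferred from the BigQuery rendering above (assumption, not from the diff)
    sql = "SELECT LAST_DAY(CAST('2008-11-25' AS DATE), MONTH)"
    print(sqlglot.transpile(sql, read="bigquery", write="clickhouse")[0])
    # Expected per the updated test:
    # SELECT LAST_DAY(CAST('2008-11-25' AS Nullable(DATE)))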
@@ -510,6 +513,20 @@ LANGUAGE js AS
                 "duckdb": "SELECT STRFTIME(CAST('2023-12-25' AS DATE), '%Y%m%d')",
             },
         )
+        self.validate_all(
+            "SELECT FORMAT_DATETIME('%Y%m%d %H:%M:%S', DATETIME '2023-12-25 15:30:00')",
+            write={
+                "bigquery": "SELECT FORMAT_DATETIME('%Y%m%d %H:%M:%S', CAST('2023-12-25 15:30:00' AS DATETIME))",
+                "duckdb": "SELECT STRFTIME(CAST('2023-12-25 15:30:00' AS TIMESTAMP), '%Y%m%d %H:%M:%S')",
+            },
+        )
+        self.validate_all(
+            "SELECT FORMAT_DATETIME('%x', '2023-12-25 15:30:00')",
+            write={
+                "bigquery": "SELECT FORMAT_DATETIME('%x', '2023-12-25 15:30:00')",
+                "duckdb": "SELECT STRFTIME(CAST('2023-12-25 15:30:00' AS TIMESTAMP), '%x')",
+            },
+        )
         self.validate_all(
             "SELECT COUNTIF(x)",
             read={
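
The new FORMAT_DATETIME tests assert a BigQuery-to-DuckDB strftime mapping. A minimal sketch via sqlglot's transpile API; the expected string is copied from the test above, not independently verified:

    import sqlglot

    sql = "SELECT FORMAT_DATETIME('%Y%m%d %H:%M:%S', DATETIME '2023-12-25 15:30:00')"
    print(sqlglot.transpile(sql, read="bigquery", write="duckdb")[0])
    # Per the test expectation:
    # SELECT STRFTIME(CAST('2023-12-25 15:30:00' AS TIMESTAMP), '%Y%m%d %H:%M:%S')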
@@ -636,6 +653,7 @@ LANGUAGE js AS
             write={
                 "bigquery": "SELECT DATETIME_TRUNC('2023-01-01T01:01:01', HOUR)",
+                "databricks": "SELECT DATE_TRUNC('HOUR', '2023-01-01T01:01:01')",
                 "duckdb": "SELECT DATE_TRUNC('HOUR', CAST('2023-01-01T01:01:01' AS DATETIME))",
             },
         ),
     )
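
A minimal sketch of the Databricks mapping the added line asserts, again via sqlglot's transpile API (expected output copied from the test):

    import sqlglot

    sql = "SELECT DATETIME_TRUNC('2023-01-01T01:01:01', HOUR)"
    print(sqlglot.transpile(sql, read="bigquery", write="databricks")[0])
    # Per the test expectation: SELECT DATE_TRUNC('HOUR', '2023-01-01T01:01:01')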
@@ -1209,10 +1227,9 @@ LANGUAGE js AS
             "SELECT * FROM a WHERE b IN UNNEST([1, 2, 3])",
             write={
                 "bigquery": "SELECT * FROM a WHERE b IN UNNEST([1, 2, 3])",
-                "mysql": "SELECT * FROM a WHERE b IN (SELECT UNNEST(ARRAY(1, 2, 3)))",
                 "presto": "SELECT * FROM a WHERE b IN (SELECT UNNEST(ARRAY[1, 2, 3]))",
-                "hive": "SELECT * FROM a WHERE b IN (SELECT UNNEST(ARRAY(1, 2, 3)))",
-                "spark": "SELECT * FROM a WHERE b IN (SELECT UNNEST(ARRAY(1, 2, 3)))",
+                "hive": "SELECT * FROM a WHERE b IN (SELECT EXPLODE(ARRAY(1, 2, 3)))",
+                "spark": "SELECT * FROM a WHERE b IN (SELECT EXPLODE(ARRAY(1, 2, 3)))",
             },
         )
         self.validate_all(
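
The hive and spark expectations switch from UNNEST to EXPLODE here, and the mysql entry is dropped. A minimal sketch of the updated behavior (expected output copied from the test):

    import sqlglot

    sql = "SELECT * FROM a WHERE b IN UNNEST([1, 2, 3])"
    for dialect in ("hive", "spark"):
        print(sqlglot.transpile(sql, read="bigquery", write=dialect)[0])
    # Per the updated test, both dialects now render:
    # SELECT * FROM a WHERE b IN (SELECT EXPLODE(ARRAY(1, 2, 3)))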
@@ -1256,6 +1273,13 @@ LANGUAGE js AS
                 "starrocks": "DATE_DIFF('MINUTE', CAST('2010-07-07' AS DATE), CAST('2008-12-25' AS DATE))",
             },
         )
+        self.validate_all(
+            "DATE_DIFF('2021-01-01', '2020-01-01', DAY)",
+            write={
+                "bigquery": "DATE_DIFF('2021-01-01', '2020-01-01', DAY)",
+                "duckdb": "DATE_DIFF('DAY', CAST('2020-01-01' AS DATE), CAST('2021-01-01' AS DATE))",
+            },
+        )
         self.validate_all(
             "CURRENT_DATE('UTC')",
             write={
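
Note the argument flip the new DATE_DIFF test encodes: BigQuery takes (later, earlier, part) while DuckDB takes (part, start, end). A minimal sketch:

    import sqlglot

    sql = "DATE_DIFF('2021-01-01', '2020-01-01', DAY)"
    print(sqlglot.transpile(sql, read="bigquery", write="duckdb")[0])
    # Per the test expectation:
    # DATE_DIFF('DAY', CAST('2020-01-01' AS DATE), CAST('2021-01-01' AS DATE))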
@@ -1402,6 +1426,57 @@ WHERE
                 "": "SELECT LENGTH(foo)",
             },
         )
+        self.validate_all(
+            "SELECT TIME_DIFF('12:00:00', '12:30:00', MINUTE)",
+            write={
+                "duckdb": "SELECT DATE_DIFF('MINUTE', CAST('12:30:00' AS TIME), CAST('12:00:00' AS TIME))",
+                "bigquery": "SELECT TIME_DIFF('12:00:00', '12:30:00', MINUTE)",
+            },
+        )
+        self.validate_all(
+            "ARRAY_CONCAT([1, 2], [3, 4], [5, 6])",
+            write={
+                "bigquery": "ARRAY_CONCAT([1, 2], [3, 4], [5, 6])",
+                "duckdb": "ARRAY_CONCAT([1, 2], ARRAY_CONCAT([3, 4], [5, 6]))",
+                "postgres": "ARRAY_CAT(ARRAY[1, 2], ARRAY_CAT(ARRAY[3, 4], ARRAY[5, 6]))",
+                "redshift": "ARRAY_CONCAT(ARRAY(1, 2), ARRAY_CONCAT(ARRAY(3, 4), ARRAY(5, 6)))",
+                "snowflake": "ARRAY_CAT([1, 2], ARRAY_CAT([3, 4], [5, 6]))",
+                "hive": "CONCAT(ARRAY(1, 2), ARRAY(3, 4), ARRAY(5, 6))",
+                "spark2": "CONCAT(ARRAY(1, 2), ARRAY(3, 4), ARRAY(5, 6))",
+                "spark": "CONCAT(ARRAY(1, 2), ARRAY(3, 4), ARRAY(5, 6))",
+                "databricks": "CONCAT(ARRAY(1, 2), ARRAY(3, 4), ARRAY(5, 6))",
+                "presto": "CONCAT(ARRAY[1, 2], ARRAY[3, 4], ARRAY[5, 6])",
+                "trino": "CONCAT(ARRAY[1, 2], ARRAY[3, 4], ARRAY[5, 6])",
+            },
+        )
+        self.validate_all(
+            "SELECT GENERATE_DATE_ARRAY('2016-10-05', '2016-10-08')",
+            write={
+                "duckdb": "SELECT CAST(GENERATE_SERIES(CAST('2016-10-05' AS DATE), CAST('2016-10-08' AS DATE), INTERVAL 1 DAY) AS DATE[])",
+                "bigquery": "SELECT GENERATE_DATE_ARRAY('2016-10-05', '2016-10-08', INTERVAL 1 DAY)",
+            },
+        )
+        self.validate_all(
+            "SELECT GENERATE_DATE_ARRAY('2016-10-05', '2016-10-08', INTERVAL '1' MONTH)",
+            write={
+                "duckdb": "SELECT CAST(GENERATE_SERIES(CAST('2016-10-05' AS DATE), CAST('2016-10-08' AS DATE), INTERVAL '1' MONTH) AS DATE[])",
+                "bigquery": "SELECT GENERATE_DATE_ARRAY('2016-10-05', '2016-10-08', INTERVAL '1' MONTH)",
+            },
+        )
+        self.validate_all(
+            "SELECT GENERATE_TIMESTAMP_ARRAY('2016-10-05 00:00:00', '2016-10-07 00:00:00', INTERVAL '1' DAY)",
+            write={
+                "duckdb": "SELECT GENERATE_SERIES(CAST('2016-10-05 00:00:00' AS TIMESTAMP), CAST('2016-10-07 00:00:00' AS TIMESTAMP), INTERVAL '1' DAY)",
+                "bigquery": "SELECT GENERATE_TIMESTAMP_ARRAY('2016-10-05 00:00:00', '2016-10-07 00:00:00', INTERVAL '1' DAY)",
+            },
+        )
+        self.validate_all(
+            "SELECT PARSE_DATE('%A %b %e %Y', 'Thursday Dec 25 2008')",
+            write={
+                "bigquery": "SELECT PARSE_DATE('%A %b %e %Y', 'Thursday Dec 25 2008')",
+                "duckdb": "SELECT CAST(STRPTIME('Thursday Dec 25 2008', '%A %b %-d %Y') AS DATE)",
+            },
+        )
 
     def test_errors(self):
         with self.assertRaises(TokenError):
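
The new GENERATE_DATE_ARRAY tests map onto DuckDB's GENERATE_SERIES, with an outer CAST(... AS DATE[]) in the date case. A minimal sketch (expected output copied from the test):

    import sqlglot

    sql = "SELECT GENERATE_DATE_ARRAY('2016-10-05', '2016-10-08')"
    print(sqlglot.transpile(sql, read="bigquery", write="duckdb")[0])
    # Per the test expectation:
    # SELECT CAST(GENERATE_SERIES(CAST('2016-10-05' AS DATE), CAST('2016-10-08' AS DATE), INTERVAL 1 DAY) AS DATE[])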
@@ -1794,14 +1869,14 @@ OPTIONS (
             "SELECT * FROM UNNEST(ARRAY<STRUCT<x INT64>>[])",
             write={
                 "bigquery": "SELECT * FROM UNNEST(CAST([] AS ARRAY<STRUCT<x INT64>>))",
-                "duckdb": "SELECT * FROM UNNEST(CAST([] AS STRUCT(x BIGINT)[]))",
+                "duckdb": "SELECT * FROM (SELECT UNNEST(CAST([] AS STRUCT(x BIGINT)[]), max_depth => 2))",
             },
         )
         self.validate_all(
             "SELECT * FROM UNNEST(ARRAY<STRUCT<device_id INT64, time DATETIME, signal INT64, state STRING>>[STRUCT(1, DATETIME '2023-11-01 09:34:01', 74, 'INACTIVE'),STRUCT(4, DATETIME '2023-11-01 09:38:01', 80, 'ACTIVE')])",
             write={
                 "bigquery": "SELECT * FROM UNNEST(CAST([STRUCT(1, CAST('2023-11-01 09:34:01' AS DATETIME), 74, 'INACTIVE'), STRUCT(4, CAST('2023-11-01 09:38:01' AS DATETIME), 80, 'ACTIVE')] AS ARRAY<STRUCT<device_id INT64, time DATETIME, signal INT64, state STRING>>))",
-                "duckdb": "SELECT * FROM UNNEST(CAST([ROW(1, CAST('2023-11-01 09:34:01' AS TIMESTAMP), 74, 'INACTIVE'), ROW(4, CAST('2023-11-01 09:38:01' AS TIMESTAMP), 80, 'ACTIVE')] AS STRUCT(device_id BIGINT, time TIMESTAMP, signal BIGINT, state TEXT)[]))",
+                "duckdb": "SELECT * FROM (SELECT UNNEST(CAST([ROW(1, CAST('2023-11-01 09:34:01' AS TIMESTAMP), 74, 'INACTIVE'), ROW(4, CAST('2023-11-01 09:38:01' AS TIMESTAMP), 80, 'ACTIVE')] AS STRUCT(device_id BIGINT, time TIMESTAMP, signal BIGINT, state TEXT)[]), max_depth => 2))",
             },
         )
         self.validate_all(
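
Both DuckDB expectations in this hunk change from a bare UNNEST to a subquery using the max_depth => 2 named parameter, which unnests an array of structs one level further, into columns. A minimal sketch of the first case:

    import sqlglot

    sql = "SELECT * FROM UNNEST(ARRAY<STRUCT<x INT64>>[])"
    print(sqlglot.transpile(sql, read="bigquery", write="duckdb")[0])
    # Per the updated test expectation:
    # SELECT * FROM (SELECT UNNEST(CAST([] AS STRUCT(x BIGINT)[]), max_depth => 2))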
@@ -1811,3 +1886,51 @@ OPTIONS (
                 "duckdb": "SELECT CAST(ROW(1, ROW('c_str')) AS STRUCT(a BIGINT, b STRUCT(c TEXT)))",
             },
         )
+
+    def test_convert(self):
+        for value, expected in [
+            (datetime.datetime(2023, 1, 1), "CAST('2023-01-01 00:00:00' AS DATETIME)"),
+            (datetime.datetime(2023, 1, 1, 12, 13, 14), "CAST('2023-01-01 12:13:14' AS DATETIME)"),
+            (
+                datetime.datetime(2023, 1, 1, 12, 13, 14, tzinfo=datetime.timezone.utc),
+                "CAST('2023-01-01 12:13:14+00:00' AS TIMESTAMP)",
+            ),
+            (
+                pytz.timezone("America/Los_Angeles").localize(
+                    datetime.datetime(2023, 1, 1, 12, 13, 14)
+                ),
+                "CAST('2023-01-01 12:13:14-08:00' AS TIMESTAMP)",
+            ),
+        ]:
+            with self.subTest(value):
+                self.assertEqual(exp.convert(value).sql(dialect=self.dialect), expected)
+
+    def test_unnest(self):
+        self.validate_all(
+            "SELECT name, laps FROM UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps), STRUCT('Makhloufi' AS name, [24.5, 25.4, 26.6, 26.1] AS laps)])",
+            write={
+                "bigquery": "SELECT name, laps FROM UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps), STRUCT('Makhloufi' AS name, [24.5, 25.4, 26.6, 26.1] AS laps)])",
+                "duckdb": "SELECT name, laps FROM (SELECT UNNEST([{'name': 'Rudisha', 'laps': [23.4, 26.3, 26.4, 26.1]}, {'name': 'Makhloufi', 'laps': [24.5, 25.4, 26.6, 26.1]}], max_depth => 2))",
+            },
+        )
+        self.validate_all(
+            "WITH Races AS (SELECT '800M' AS race) SELECT race, name, laps FROM Races AS r CROSS JOIN UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps)])",
+            write={
+                "bigquery": "WITH Races AS (SELECT '800M' AS race) SELECT race, name, laps FROM Races AS r CROSS JOIN UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps)])",
+                "duckdb": "WITH Races AS (SELECT '800M' AS race) SELECT race, name, laps FROM Races AS r CROSS JOIN (SELECT UNNEST([{'name': 'Rudisha', 'laps': [23.4, 26.3, 26.4, 26.1]}], max_depth => 2))",
+            },
+        )
+        self.validate_all(
+            "SELECT participant FROM UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps)]) AS participant",
+            write={
+                "bigquery": "SELECT participant FROM UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps)]) AS participant",
+                "duckdb": "SELECT participant FROM (SELECT UNNEST([{'name': 'Rudisha', 'laps': [23.4, 26.3, 26.4, 26.1]}], max_depth => 2)) AS participant",
+            },
+        )
+        self.validate_all(
+            "WITH Races AS (SELECT '800M' AS race) SELECT race, participant FROM Races AS r CROSS JOIN UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps)]) AS participant",
+            write={
+                "bigquery": "WITH Races AS (SELECT '800M' AS race) SELECT race, participant FROM Races AS r CROSS JOIN UNNEST([STRUCT('Rudisha' AS name, [23.4, 26.3, 26.4, 26.1] AS laps)]) AS participant",
+                "duckdb": "WITH Races AS (SELECT '800M' AS race) SELECT race, participant FROM Races AS r CROSS JOIN (SELECT UNNEST([{'name': 'Rudisha', 'laps': [23.4, 26.3, 26.4, 26.1]}], max_depth => 2)) AS participant",
+            },
+        )
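
The new test_convert exercises exp.convert, which turns plain Python values into sqlglot expressions; self.dialect is "bigquery" in this suite, so naive datetimes render as DATETIME and timezone-aware ones as TIMESTAMP. A minimal standalone sketch, with expected strings copied from the test:

    import datetime
    from sqlglot import exp

    # Naive datetime: rendered as a BigQuery DATETIME cast.
    print(exp.convert(datetime.datetime(2023, 1, 1)).sql(dialect="bigquery"))
    # CAST('2023-01-01 00:00:00' AS DATETIME)

    # Timezone-aware datetime: rendered as a TIMESTAMP cast with an offset.
    tz = datetime.timezone.utc
    print(exp.convert(datetime.datetime(2023, 1, 1, 12, 13, 14, tzinfo=tz)).sql(dialect="bigquery"))
    # CAST('2023-01-01 12:13:14+00:00' AS TIMESTAMP)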