
Adding upstream version 26.8.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-03-04 07:54:33 +01:00
parent 4b797b16f0
commit 4c394df415
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
61 changed files with 43883 additions and 41898 deletions

@@ -168,6 +168,18 @@ class TestDialect(Validator):
         self.assertFalse(snowflake_class in {"bigquery", "redshift"})
         self.assertFalse(snowflake_object in {"bigquery", "redshift"})
 
+    def test_compare_dialect_versions(self):
+        ddb_v1 = Dialect.get_or_raise("duckdb, version=1.0")
+        ddb_v1_2 = Dialect.get_or_raise("duckdb, foo=bar, version=1.0")
+        ddb_v2 = Dialect.get_or_raise("duckdb, version=2.2.4")
+        ddb_latest = Dialect.get_or_raise("duckdb")
+
+        self.assertTrue(ddb_latest.version > ddb_v2.version)
+        self.assertTrue(ddb_v1.version < ddb_v2.version)
+        self.assertTrue(ddb_v1.version == ddb_v1_2.version)
+        self.assertTrue(ddb_latest.version == Dialect.get_or_raise("duckdb").version)
+
     def test_cast(self):
         self.validate_all(
             "CAST(a AS TEXT)",
@ -3015,7 +3027,7 @@ FROM subquery2""",
"databricks": "SELECT * FROM EXPLODE(SEQUENCE(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), INTERVAL '1' WEEK))",
"duckdb": "SELECT * FROM UNNEST(CAST(GENERATE_SERIES(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), (7 * INTERVAL '1' DAY)) AS DATE[]))",
"mysql": "WITH RECURSIVE _generated_dates(date_value) AS (SELECT CAST('2020-01-01' AS DATE) AS date_value UNION ALL SELECT CAST(DATE_ADD(date_value, INTERVAL 1 WEEK) AS DATE) FROM _generated_dates WHERE CAST(DATE_ADD(date_value, INTERVAL 1 WEEK) AS DATE) <= CAST('2020-02-01' AS DATE)) SELECT * FROM (SELECT date_value FROM _generated_dates) AS _generated_dates",
"postgres": "SELECT * FROM (SELECT CAST(value AS DATE) FROM GENERATE_SERIES(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), INTERVAL '1 WEEK') AS value) AS _unnested_generate_series",
"postgres": "SELECT * FROM (SELECT CAST(value AS DATE) FROM GENERATE_SERIES(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), INTERVAL '1 WEEK') AS _t(value)) AS _unnested_generate_series",
"presto": "SELECT * FROM UNNEST(SEQUENCE(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), (1 * INTERVAL '7' DAY)))",
"redshift": "WITH RECURSIVE _generated_dates(date_value) AS (SELECT CAST('2020-01-01' AS DATE) AS date_value UNION ALL SELECT CAST(DATEADD(WEEK, 1, date_value) AS DATE) FROM _generated_dates WHERE CAST(DATEADD(WEEK, 1, date_value) AS DATE) <= CAST('2020-02-01' AS DATE)) SELECT * FROM (SELECT date_value FROM _generated_dates) AS _generated_dates",
"snowflake": "SELECT * FROM (SELECT DATEADD(WEEK, CAST(value AS INT), CAST('2020-01-01' AS DATE)) AS value FROM TABLE(FLATTEN(INPUT => ARRAY_GENERATE_RANGE(0, (DATEDIFF(WEEK, CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE)) + 1 - 1) + 1))) AS _u(seq, key, path, index, value, this))",
@ -3335,3 +3347,26 @@ FROM subquery2""",
"tsql": "SCHEMA_NAME()",
},
)
def test_integer_hex_strings(self):
# Hex strings such as 0xCC represent INTEGER values in the read dialects
integer_dialects = ("bigquery", "clickhouse")
for read_dialect in integer_dialects:
for write_dialect in (
"",
"duckdb",
"databricks",
"snowflake",
"spark",
"redshift",
):
with self.subTest(f"Testing hex string -> INTEGER evaluation for {read_dialect}"):
self.assertEqual(
parse_one("SELECT 0xCC", read=read_dialect).sql(write_dialect), "SELECT 204"
)
for other_integer_dialects in integer_dialects:
self.assertEqual(
parse_one("SELECT 0xCC", read=read_dialect).sql(other_integer_dialects),
"SELECT 0xCC",
)
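
For reference, a minimal sketch of the transpilation behaviour asserted by test_integer_hex_strings, using sqlglot's parse_one:

from sqlglot import parse_one

expr = parse_one("SELECT 0xCC", read="bigquery")

# Dialects without integer hex literals receive the evaluated value...
print(expr.sql("duckdb"))      # SELECT 204
# ...while another hex-integer dialect keeps the original notation.
print(expr.sql("clickhouse"))  # SELECT 0xCC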