Merging upstream version 26.8.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent d551ab0954
commit 010433ad9a
61 changed files with 43883 additions and 41898 deletions
@@ -168,6 +168,18 @@ class TestDialect(Validator):
         self.assertFalse(snowflake_class in {"bigquery", "redshift"})
         self.assertFalse(snowflake_object in {"bigquery", "redshift"})
 
+    def test_compare_dialect_versions(self):
+        ddb_v1 = Dialect.get_or_raise("duckdb, version=1.0")
+        ddb_v1_2 = Dialect.get_or_raise("duckdb, foo=bar, version=1.0")
+        ddb_v2 = Dialect.get_or_raise("duckdb, version=2.2.4")
+        ddb_latest = Dialect.get_or_raise("duckdb")
+
+        self.assertTrue(ddb_latest.version > ddb_v2.version)
+        self.assertTrue(ddb_v1.version < ddb_v2.version)
+
+        self.assertTrue(ddb_v1.version == ddb_v1_2.version)
+        self.assertTrue(ddb_latest.version == Dialect.get_or_raise("duckdb").version)
+
     def test_cast(self):
         self.validate_all(
             "CAST(a AS TEXT)",
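Note on the hunk above: Dialect.get_or_raise accepts a comma-separated settings string, and the new test pins a version setting and compares dialects through their version attribute. Below is a minimal usage sketch assuming only what the assertions show (an unpinned dialect compares as newer than any pinned one); how version is represented internally is not part of this diff.

    from sqlglot.dialects.dialect import Dialect

    # Settings-string syntax and the `version` attribute are taken from the test hunk above.
    pinned = Dialect.get_or_raise("duckdb, version=1.0")
    latest = Dialect.get_or_raise("duckdb")  # no pin: compares as the newest version
    assert pinned.version < latest.version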
@@ -3015,7 +3027,7 @@ FROM subquery2""",
                 "databricks": "SELECT * FROM EXPLODE(SEQUENCE(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), INTERVAL '1' WEEK))",
                 "duckdb": "SELECT * FROM UNNEST(CAST(GENERATE_SERIES(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), (7 * INTERVAL '1' DAY)) AS DATE[]))",
                 "mysql": "WITH RECURSIVE _generated_dates(date_value) AS (SELECT CAST('2020-01-01' AS DATE) AS date_value UNION ALL SELECT CAST(DATE_ADD(date_value, INTERVAL 1 WEEK) AS DATE) FROM _generated_dates WHERE CAST(DATE_ADD(date_value, INTERVAL 1 WEEK) AS DATE) <= CAST('2020-02-01' AS DATE)) SELECT * FROM (SELECT date_value FROM _generated_dates) AS _generated_dates",
-                "postgres": "SELECT * FROM (SELECT CAST(value AS DATE) FROM GENERATE_SERIES(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), INTERVAL '1 WEEK') AS value) AS _unnested_generate_series",
+                "postgres": "SELECT * FROM (SELECT CAST(value AS DATE) FROM GENERATE_SERIES(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), INTERVAL '1 WEEK') AS _t(value)) AS _unnested_generate_series",
                 "presto": "SELECT * FROM UNNEST(SEQUENCE(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), (1 * INTERVAL '7' DAY)))",
                 "redshift": "WITH RECURSIVE _generated_dates(date_value) AS (SELECT CAST('2020-01-01' AS DATE) AS date_value UNION ALL SELECT CAST(DATEADD(WEEK, 1, date_value) AS DATE) FROM _generated_dates WHERE CAST(DATEADD(WEEK, 1, date_value) AS DATE) <= CAST('2020-02-01' AS DATE)) SELECT * FROM (SELECT date_value FROM _generated_dates) AS _generated_dates",
                 "snowflake": "SELECT * FROM (SELECT DATEADD(WEEK, CAST(value AS INT), CAST('2020-01-01' AS DATE)) AS value FROM TABLE(FLATTEN(INPUT => ARRAY_GENERATE_RANGE(0, (DATEDIFF(WEEK, CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE)) + 1 - 1) + 1))) AS _u(seq, key, path, index, value, this))",
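The only change in this hunk is the expected Postgres output: the GENERATE_SERIES call now gets an explicit table-and-column alias, AS _t(value), instead of the bare AS value. A small sketch for inspecting how each alias form round-trips through sqlglot's Postgres dialect; the GENERATE_SERIES(1, 3) query is an illustrative stand-in, not the statement from the test.

    import sqlglot

    for sql in (
        "SELECT * FROM GENERATE_SERIES(1, 3) AS value",      # old expected alias style
        "SELECT * FROM GENERATE_SERIES(1, 3) AS _t(value)",  # new expected alias style
    ):
        # Parse and re-render with the Postgres dialect to see how each alias is preserved.
        print(sqlglot.parse_one(sql, read="postgres").sql(dialect="postgres"))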
@@ -3335,3 +3347,26 @@ FROM subquery2""",
                 "tsql": "SCHEMA_NAME()",
             },
         )
+
+    def test_integer_hex_strings(self):
+        # Hex strings such as 0xCC represent INTEGER values in the read dialects
+        integer_dialects = ("bigquery", "clickhouse")
+        for read_dialect in integer_dialects:
+            for write_dialect in (
+                "",
+                "duckdb",
+                "databricks",
+                "snowflake",
+                "spark",
+                "redshift",
+            ):
+                with self.subTest(f"Testing hex string -> INTEGER evaluation for {read_dialect}"):
+                    self.assertEqual(
+                        parse_one("SELECT 0xCC", read=read_dialect).sql(write_dialect), "SELECT 204"
+                    )
+
+            for other_integer_dialects in integer_dialects:
+                self.assertEqual(
+                    parse_one("SELECT 0xCC", read=read_dialect).sql(other_integer_dialects),
+                    "SELECT 0xCC",
+                )
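This hunk pins down how integer-valued hex literals are transpiled: dialects such as BigQuery and ClickHouse read 0xCC as the integer 204, which is written out as a plain integer for dialects without hex literals and kept as 0xCC when the target is another hex-literal dialect. A minimal sketch mirroring the assertions above:

    from sqlglot import parse_one

    # Behaviour asserted by the new test: 0xCC is the integer 204 when read by BigQuery.
    print(parse_one("SELECT 0xCC", read="bigquery").sql("duckdb"))      # SELECT 204
    print(parse_one("SELECT 0xCC", read="bigquery").sql("clickhouse"))  # SELECT 0xCC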