1
0
Fork 0

Adding upstream version 25.6.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 21:42:48 +01:00
parent 4e506fbac7
commit f9dae8e951
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
69 changed files with 46817 additions and 45778 deletions

View file

@ -1164,6 +1164,13 @@ class TestDialect(Validator):
},
)
order_by_all_sql = "SELECT * FROM t ORDER BY ALL"
self.validate_identity(order_by_all_sql).find(exp.Ordered).this.assert_is(exp.Column)
for dialect in ("duckdb", "spark", "databricks"):
with self.subTest(f"Testing ORDER BY ALL in {dialect}"):
parse_one(order_by_all_sql, read=dialect).find(exp.Ordered).this.assert_is(exp.Var)
def test_json(self):
self.validate_all(
"""JSON_EXTRACT(x, '$["a b"]')""",
@ -2267,7 +2274,7 @@ SELECT
write={
"duckdb": "SELECT * FROM t QUALIFY COUNT(*) OVER () > 1",
"snowflake": "SELECT * FROM t QUALIFY COUNT(*) OVER () > 1",
"clickhouse": "SELECT * FROM (SELECT *, COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1",
"clickhouse": "SELECT * FROM t QUALIFY COUNT(*) OVER () > 1",
"mysql": "SELECT * FROM (SELECT *, COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1",
"oracle": "SELECT * FROM (SELECT *, COUNT(*) OVER () AS _w FROM t) _t WHERE _w > 1",
"postgres": "SELECT * FROM (SELECT *, COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1",
@ -2279,7 +2286,7 @@ SELECT
write={
"duckdb": 'SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id" FROM t QUALIFY COUNT(*) OVER () > 1',
"snowflake": 'SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id" FROM t QUALIFY COUNT(*) OVER () > 1',
"clickhouse": 'SELECT "user id", some_id, other_id, "2 nd id" FROM (SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id", COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1',
"clickhouse": 'SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id" FROM t QUALIFY COUNT(*) OVER () > 1',
"mysql": "SELECT `user id`, some_id, other_id, `2 nd id` FROM (SELECT `user id`, some_id, 1 AS other_id, 2 AS `2 nd id`, COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1",
"oracle": 'SELECT "user id", some_id, other_id, "2 nd id" FROM (SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id", COUNT(*) OVER () AS _w FROM t) _t WHERE _w > 1',
"postgres": 'SELECT "user id", some_id, other_id, "2 nd id" FROM (SELECT "user id", some_id, 1 AS other_id, 2 AS "2 nd id", COUNT(*) OVER () AS _w FROM t) AS _t WHERE _w > 1',
@ -2566,3 +2573,33 @@ FROM subquery2""",
"""SELECT partition.d FROM t PARTITION (d)""",
"""SELECT partition.d FROM t AS PARTITION(d)""",
)
def test_string_functions(self):
    """Round-trip LPAD/RPAD across dialects.

    Covers both the two-argument form (implicit single-space fill) and the
    explicit three-argument form. ClickHouse reads its LEFTPAD/RIGHTPAD
    aliases but writes the canonical names; dialects that require an
    explicit fill pattern (duckdb, mysql, hive, spark2, presto, trino)
    always render the trailing ", ' '" argument.
    """
    for func, ch_func in (("LPAD", "LEFTPAD"), ("RPAD", "RIGHTPAD")):
        for fill in ("", ", ' '"):
            with self.subTest(f"Testing {func}() with pattern {fill}"):
                default_sql = f"SELECT {func}('bar', 5{fill})"
                explicit_sql = f"SELECT {func}('bar', 5, ' ')"

                # Dialects parsed with the same surface syntax, plus the
                # ClickHouse alias spelling.
                read = {
                    dialect: default_sql
                    for dialect in ("snowflake", "databricks", "spark", "postgres")
                }
                read["clickhouse"] = f"SELECT {ch_func}('bar', 5{fill})"

                # Writers that preserve the input form as-is.
                write = {
                    dialect: default_sql
                    for dialect in (
                        "",
                        "spark",
                        "postgres",
                        "clickhouse",
                        "snowflake",
                        "databricks",
                    )
                }
                # Writers that must spell out the default fill pattern.
                write.update(
                    (dialect, explicit_sql)
                    for dialect in ("duckdb", "mysql", "hive", "spark2", "presto", "trino")
                )

                self.validate_all(default_sql, read=read, write=write)