Merging upstream version 25.30.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 21:56:38 +01:00
parent 4816f3663d
commit ebf5336f85
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
69 changed files with 48139 additions and 46098 deletions

tests/dialects/test_postgres.py

@@ -51,7 +51,6 @@ class TestPostgres(Validator):
         self.validate_identity("x$")
         self.validate_identity("SELECT ARRAY[1, 2, 3]")
         self.validate_identity("SELECT ARRAY(SELECT 1)")
-        self.validate_identity("SELECT ARRAY_LENGTH(ARRAY[1, 2, 3], 1)")
         self.validate_identity("STRING_AGG(x, y)")
         self.validate_identity("STRING_AGG(x, ',' ORDER BY y)")
         self.validate_identity("STRING_AGG(x, ',' ORDER BY y DESC)")
@@ -683,6 +682,11 @@ class TestPostgres(Validator):
             """SELECT TRIM(TRAILING ' XXX ' COLLATE "de_DE")""",
             """SELECT RTRIM(' XXX ' COLLATE "de_DE")""",
         )
+        self.validate_identity("LEVENSHTEIN(col1, col2)")
+        self.validate_identity("LEVENSHTEIN_LESS_EQUAL(col1, col2, 1)")
+        self.validate_identity("LEVENSHTEIN(col1, col2, 1, 2, 3)")
+        self.validate_identity("LEVENSHTEIN_LESS_EQUAL(col1, col2, 1, 2, 3, 4)")
         self.validate_all(
             """'{"a":1,"b":2}'::json->'b'""",
             write={
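
Postgres gets these functions from the fuzzystrmatch extension: LEVENSHTEIN(source, target[, ins_cost, del_cost, sub_cost]) and LEVENSHTEIN_LESS_EQUAL with a trailing max_d bound. A minimal standalone sketch of what the validate_identity assertions above check, using sqlglot's public transpile API (round-tripping through the postgres dialect should be a no-op):

    import sqlglot

    # Each identity asserts parse -> regenerate is lossless in the postgres
    # dialect; the optional args are fuzzystrmatch's insert/delete/substitute
    # costs, plus max_d for LEVENSHTEIN_LESS_EQUAL.
    for sql in (
        "LEVENSHTEIN(col1, col2)",
        "LEVENSHTEIN_LESS_EQUAL(col1, col2, 1, 2, 3, 4)",
    ):
        assert sqlglot.transpile(sql, read="postgres", write="postgres")[0] == sql
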
@@ -1237,3 +1241,49 @@ CROSS JOIN JSON_ARRAY_ELEMENTS(CAST(JSON_EXTRACT_PATH(tbox, 'boxes') AS JSON)) A
         self.validate_identity(
             """SELECT * FROM table1, ROWS FROM (FUNC1(col1) AS alias1("col1" TEXT)) WITH ORDINALITY AS alias3("col3" INT, "col4" TEXT)"""
         )
+
+    def test_array_length(self):
+        self.validate_identity("SELECT ARRAY_LENGTH(ARRAY[1, 2, 3], 1)")
+
+        self.validate_all(
+            "ARRAY_LENGTH(arr, 1)",
+            read={
+                "bigquery": "ARRAY_LENGTH(arr)",
+                "duckdb": "ARRAY_LENGTH(arr)",
+                "presto": "CARDINALITY(arr)",
+                "drill": "REPEATED_COUNT(arr)",
+                "teradata": "CARDINALITY(arr)",
+                "hive": "SIZE(arr)",
+                "spark2": "SIZE(arr)",
+                "spark": "SIZE(arr)",
+                "databricks": "SIZE(arr)",
+            },
+            write={
+                "duckdb": "ARRAY_LENGTH(arr, 1)",
+                "presto": "CARDINALITY(arr)",
+                "teradata": "CARDINALITY(arr)",
+                "bigquery": "ARRAY_LENGTH(arr)",
+                "drill": "REPEATED_COUNT(arr)",
+                "clickhouse": "LENGTH(arr)",
+                "hive": "SIZE(arr)",
+                "spark2": "SIZE(arr)",
+                "spark": "SIZE(arr)",
+                "databricks": "SIZE(arr)",
+            },
+        )
+
+        self.validate_all(
+            "ARRAY_LENGTH(arr, foo)",
+            write={
+                "duckdb": "ARRAY_LENGTH(arr, foo)",
+                "hive": UnsupportedError,
+                "spark2": UnsupportedError,
+                "spark": UnsupportedError,
+                "databricks": UnsupportedError,
+                "presto": UnsupportedError,
+                "teradata": UnsupportedError,
+                "bigquery": UnsupportedError,
+                "drill": UnsupportedError,
+                "clickhouse": UnsupportedError,
+            },
+        )
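
For reference, validate_all asserts the same expression across many dialects at once. A standalone sketch of a few of the mappings exercised above, with the expected strings taken directly from the test; the UnsupportedError entries assert that a non-constant dimension raises when unsupported features are escalated to errors (sqlglot only warns by default):

    import sqlglot
    from sqlglot import ErrorLevel, UnsupportedError

    sql = "ARRAY_LENGTH(arr, 1)"  # Postgres: length of the array's first dimension
    for dialect, expected in {
        "presto": "CARDINALITY(arr)",
        "bigquery": "ARRAY_LENGTH(arr)",
        "spark": "SIZE(arr)",
        "duckdb": "ARRAY_LENGTH(arr, 1)",
    }.items():
        assert sqlglot.transpile(sql, read="postgres", write=dialect)[0] == expected

    # A dimension other than the literal 1 has no equivalent in these dialects:
    try:
        sqlglot.transpile(
            "ARRAY_LENGTH(arr, foo)",
            read="postgres",
            write="presto",
            unsupported_level=ErrorLevel.RAISE,
        )
    except UnsupportedError:
        pass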