Adding upstream version 21.0.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>

parent 07f4660f31
commit 91f2cef5f0

115 changed files with 66603 additions and 60920 deletions
@@ -378,6 +378,31 @@ class TestDialect(Validator):
             read={"postgres": "INET '127.0.0.1/32'"},
         )
 
+    def test_ddl(self):
+        self.validate_all(
+            "CREATE TABLE a LIKE b",
+            write={
+                "": "CREATE TABLE a LIKE b",
+                "bigquery": "CREATE TABLE a LIKE b",
+                "clickhouse": "CREATE TABLE a AS b",
+                "databricks": "CREATE TABLE a LIKE b",
+                "doris": "CREATE TABLE a LIKE b",
+                "drill": "CREATE TABLE a AS SELECT * FROM b LIMIT 0",
+                "duckdb": "CREATE TABLE a AS SELECT * FROM b LIMIT 0",
+                "hive": "CREATE TABLE a LIKE b",
+                "mysql": "CREATE TABLE a LIKE b",
+                "oracle": "CREATE TABLE a LIKE b",
+                "postgres": "CREATE TABLE a (LIKE b)",
+                "presto": "CREATE TABLE a (LIKE b)",
+                "redshift": "CREATE TABLE a (LIKE b)",
+                "snowflake": "CREATE TABLE a LIKE b",
+                "spark": "CREATE TABLE a LIKE b",
+                "sqlite": "CREATE TABLE a AS SELECT * FROM b LIMIT 0",
+                "trino": "CREATE TABLE a (LIKE b)",
+                "tsql": "SELECT TOP 0 * INTO a FROM b AS temp",
+            },
+        )
+
     def test_heredoc_strings(self):
         for dialect in ("clickhouse", "postgres", "redshift"):
             # Invalid matching tag
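The test_ddl block added above pins down how a dialect-agnostic CREATE TABLE a LIKE b is rendered for each target dialect. A minimal sketch of reproducing those mappings through sqlglot.transpile (assuming sqlglot >= 21.0.1 is installed; illustrative, not part of the commit):

import sqlglot

sql = "CREATE TABLE a LIKE b"

# Some targets keep the LIKE form, others fall back to different constructs,
# mirroring the write={} expectations in the hunk above.
print(sqlglot.transpile(sql, write="postgres")[0])  # CREATE TABLE a (LIKE b)
print(sqlglot.transpile(sql, write="duckdb")[0])    # CREATE TABLE a AS SELECT * FROM b LIMIT 0
print(sqlglot.transpile(sql, write="tsql")[0])      # SELECT TOP 0 * INTO a FROM b AS temp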
@@ -1097,61 +1122,173 @@ class TestDialect(Validator):
 
     def test_json(self):
         self.validate_all(
-            "JSON_EXTRACT(x, 'y')",
+            """JSON_EXTRACT(x, '$["a b"]')""",
+            write={
+                "": """JSON_EXTRACT(x, '$["a b"]')""",
+                "bigquery": """JSON_EXTRACT(x, '$[\\'a b\\']')""",
+                "clickhouse": "JSONExtractString(x, 'a b')",
+                "duckdb": """x -> '$."a b"'""",
+                "mysql": """JSON_EXTRACT(x, '$."a b"')""",
+                "postgres": "JSON_EXTRACT_PATH(x, 'a b')",
+                "presto": """JSON_EXTRACT(x, '$["a b"]')""",
+                "redshift": "JSON_EXTRACT_PATH_TEXT(x, 'a b')",
+                "snowflake": """GET_PATH(x, '["a b"]')""",
+                "spark": """GET_JSON_OBJECT(x, '$[\\'a b\\']')""",
+                "sqlite": """x -> '$."a b"'""",
+                "trino": """JSON_EXTRACT(x, '$["a b"]')""",
+                "tsql": """ISNULL(JSON_QUERY(x, '$."a b"'), JSON_VALUE(x, '$."a b"'))""",
+            },
+        )
+        self.validate_all(
+            "JSON_EXTRACT(x, '$.y')",
             read={
-                "mysql": "JSON_EXTRACT(x, 'y')",
+                "bigquery": "JSON_EXTRACT(x, '$.y')",
+                "duckdb": "x -> 'y'",
+                "doris": "x -> '$.y'",
+                "mysql": "JSON_EXTRACT(x, '$.y')",
                 "postgres": "x->'y'",
-                "presto": "JSON_EXTRACT(x, 'y')",
-                "starrocks": "x -> 'y'",
-                "doris": "x -> 'y'",
+                "presto": "JSON_EXTRACT(x, '$.y')",
+                "snowflake": "GET_PATH(x, 'y')",
+                "sqlite": "x -> '$.y'",
+                "starrocks": "x -> '$.y'",
             },
             write={
-                "mysql": "JSON_EXTRACT(x, 'y')",
-                "oracle": "JSON_EXTRACT(x, 'y')",
-                "postgres": "x -> 'y'",
-                "presto": "JSON_EXTRACT(x, 'y')",
-                "starrocks": "x -> 'y'",
-                "doris": "x -> 'y'",
+                "bigquery": "JSON_EXTRACT(x, '$.y')",
+                "clickhouse": "JSONExtractString(x, 'y')",
+                "doris": "x -> '$.y'",
+                "duckdb": "x -> '$.y'",
+                "mysql": "JSON_EXTRACT(x, '$.y')",
+                "oracle": "JSON_EXTRACT(x, '$.y')",
+                "postgres": "JSON_EXTRACT_PATH(x, 'y')",
+                "presto": "JSON_EXTRACT(x, '$.y')",
+                "snowflake": "GET_PATH(x, 'y')",
+                "spark": "GET_JSON_OBJECT(x, '$.y')",
+                "sqlite": "x -> '$.y'",
+                "starrocks": "x -> '$.y'",
+                "tsql": "ISNULL(JSON_QUERY(x, '$.y'), JSON_VALUE(x, '$.y'))",
             },
         )
         self.validate_all(
-            "JSON_EXTRACT_SCALAR(x, 'y')",
+            "JSON_EXTRACT_SCALAR(x, '$.y')",
            read={
+                "bigquery": "JSON_EXTRACT_SCALAR(x, '$.y')",
+                "clickhouse": "JSONExtractString(x, 'y')",
+                "duckdb": "x ->> 'y'",
                 "postgres": "x ->> 'y'",
-                "presto": "JSON_EXTRACT_SCALAR(x, 'y')",
+                "presto": "JSON_EXTRACT_SCALAR(x, '$.y')",
+                "redshift": "JSON_EXTRACT_PATH_TEXT(x, 'y')",
                 "spark": "GET_JSON_OBJECT(x, '$.y')",
+                "snowflake": "JSON_EXTRACT_PATH_TEXT(x, 'y')",
+                "sqlite": "x ->> '$.y'",
             },
             write={
-                "postgres": "x ->> 'y'",
-                "presto": "JSON_EXTRACT_SCALAR(x, 'y')",
+                "bigquery": "JSON_EXTRACT_SCALAR(x, '$.y')",
+                "clickhouse": "JSONExtractString(x, 'y')",
+                "duckdb": "x ->> '$.y'",
+                "postgres": "JSON_EXTRACT_PATH_TEXT(x, 'y')",
+                "presto": "JSON_EXTRACT_SCALAR(x, '$.y')",
+                "redshift": "JSON_EXTRACT_PATH_TEXT(x, 'y')",
+                "snowflake": "JSON_EXTRACT_PATH_TEXT(x, 'y')",
+                "spark": "GET_JSON_OBJECT(x, '$.y')",
+                "sqlite": "x ->> '$.y'",
+                "tsql": "ISNULL(JSON_QUERY(x, '$.y'), JSON_VALUE(x, '$.y'))",
             },
         )
         self.validate_all(
-            "JSON_EXTRACT_SCALAR(stream_data, '$.data.results')",
+            "JSON_EXTRACT(x, '$.y[0].z')",
             read={
-                "hive": "GET_JSON_OBJECT(stream_data, '$.data.results')",
-                "mysql": "stream_data ->> '$.data.results'",
+                "bigquery": "JSON_EXTRACT(x, '$.y[0].z')",
+                "duckdb": "x -> '$.y[0].z'",
+                "doris": "x -> '$.y[0].z'",
+                "mysql": "JSON_EXTRACT(x, '$.y[0].z')",
+                "presto": "JSON_EXTRACT(x, '$.y[0].z')",
+                "snowflake": "GET_PATH(x, 'y[0].z')",
+                "sqlite": "x -> '$.y[0].z'",
+                "starrocks": "x -> '$.y[0].z'",
             },
             write={
-                "hive": "GET_JSON_OBJECT(stream_data, '$.data.results')",
-                "mysql": "stream_data ->> '$.data.results'",
+                "bigquery": "JSON_EXTRACT(x, '$.y[0].z')",
+                "clickhouse": "JSONExtractString(x, 'y', 1, 'z')",
+                "doris": "x -> '$.y[0].z'",
+                "duckdb": "x -> '$.y[0].z'",
+                "mysql": "JSON_EXTRACT(x, '$.y[0].z')",
+                "oracle": "JSON_EXTRACT(x, '$.y[0].z')",
+                "postgres": "JSON_EXTRACT_PATH(x, 'y', '0', 'z')",
+                "presto": "JSON_EXTRACT(x, '$.y[0].z')",
+                "redshift": "JSON_EXTRACT_PATH_TEXT(x, 'y', '0', 'z')",
+                "snowflake": "GET_PATH(x, 'y[0].z')",
+                "spark": "GET_JSON_OBJECT(x, '$.y[0].z')",
+                "sqlite": "x -> '$.y[0].z'",
+                "starrocks": "x -> '$.y[0].z'",
+                "tsql": "ISNULL(JSON_QUERY(x, '$.y[0].z'), JSON_VALUE(x, '$.y[0].z'))",
             },
         )
         self.validate_all(
-            "JSONB_EXTRACT(x, 'y')",
+            "JSON_EXTRACT_SCALAR(x, '$.y[0].z')",
             read={
-                "postgres": "x#>'y'",
+                "bigquery": "JSON_EXTRACT_SCALAR(x, '$.y[0].z')",
+                "clickhouse": "JSONExtractString(x, 'y', 1, 'z')",
+                "duckdb": "x ->> '$.y[0].z'",
+                "presto": "JSON_EXTRACT_SCALAR(x, '$.y[0].z')",
+                "snowflake": "JSON_EXTRACT_PATH_TEXT(x, 'y[0].z')",
+                "spark": 'GET_JSON_OBJECT(x, "$.y[0].z")',
+                "sqlite": "x ->> '$.y[0].z'",
             },
             write={
-                "postgres": "x #> 'y'",
+                "bigquery": "JSON_EXTRACT_SCALAR(x, '$.y[0].z')",
+                "clickhouse": "JSONExtractString(x, 'y', 1, 'z')",
+                "duckdb": "x ->> '$.y[0].z'",
+                "postgres": "JSON_EXTRACT_PATH_TEXT(x, 'y', '0', 'z')",
+                "presto": "JSON_EXTRACT_SCALAR(x, '$.y[0].z')",
+                "redshift": "JSON_EXTRACT_PATH_TEXT(x, 'y', '0', 'z')",
+                "snowflake": "JSON_EXTRACT_PATH_TEXT(x, 'y[0].z')",
+                "spark": "GET_JSON_OBJECT(x, '$.y[0].z')",
+                "sqlite": "x ->> '$.y[0].z'",
+                "tsql": "ISNULL(JSON_QUERY(x, '$.y[0].z'), JSON_VALUE(x, '$.y[0].z'))",
             },
         )
         self.validate_all(
-            "JSONB_EXTRACT_SCALAR(x, 'y')",
-            read={
-                "postgres": "x#>>'y'",
-            },
+            "JSON_EXTRACT(x, '$.y[*]')",
             write={
-                "postgres": "x #>> 'y'",
+                "bigquery": UnsupportedError,
+                "clickhouse": UnsupportedError,
+                "duckdb": "x -> '$.y[*]'",
+                "mysql": "JSON_EXTRACT(x, '$.y[*]')",
+                "postgres": UnsupportedError,
+                "presto": "JSON_EXTRACT(x, '$.y[*]')",
+                "redshift": UnsupportedError,
+                "snowflake": UnsupportedError,
+                "spark": "GET_JSON_OBJECT(x, '$.y[*]')",
+                "sqlite": UnsupportedError,
+                "tsql": UnsupportedError,
             },
         )
+        self.validate_all(
+            "JSON_EXTRACT(x, '$.y[*]')",
+            write={
+                "bigquery": "JSON_EXTRACT(x, '$.y')",
+                "clickhouse": "JSONExtractString(x, 'y')",
+                "postgres": "JSON_EXTRACT_PATH(x, 'y')",
+                "redshift": "JSON_EXTRACT_PATH_TEXT(x, 'y')",
+                "snowflake": "GET_PATH(x, 'y')",
+                "sqlite": "x -> '$.y'",
+                "tsql": "ISNULL(JSON_QUERY(x, '$.y'), JSON_VALUE(x, '$.y'))",
+            },
+        )
+        self.validate_all(
+            "JSON_EXTRACT(x, '$.y.*')",
+            write={
+                "bigquery": UnsupportedError,
+                "clickhouse": UnsupportedError,
+                "duckdb": "x -> '$.y.*'",
+                "mysql": "JSON_EXTRACT(x, '$.y.*')",
+                "postgres": UnsupportedError,
+                "presto": "JSON_EXTRACT(x, '$.y.*')",
+                "redshift": UnsupportedError,
+                "snowflake": UnsupportedError,
+                "spark": UnsupportedError,
+                "sqlite": UnsupportedError,
+                "tsql": UnsupportedError,
+            },
+        )
 
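The reworked test_json assertions above pin the new JSONPath-based normalization: extraction arguments are now full '$.'-prefixed paths, and each dialect renders them with its native accessor. A minimal sketch of exercising the same mappings through sqlglot.transpile (assuming sqlglot >= 21.0.1; illustrative, not part of the commit):

import sqlglot

expr = "JSON_EXTRACT(x, '$.y')"

# The same MySQL-style call, rendered with each target's native JSON accessor,
# matching the write={} expectations in the hunk above.
print(sqlglot.transpile(expr, read="mysql", write="postgres")[0])   # JSON_EXTRACT_PATH(x, 'y')
print(sqlglot.transpile(expr, read="mysql", write="snowflake")[0])  # GET_PATH(x, 'y')
print(sqlglot.transpile(expr, read="mysql", write="duckdb")[0])     # x -> '$.y'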