Merging upstream version 21.0.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent 3759c601a7
commit 96b10de29a
115 changed files with 66603 additions and 60920 deletions
@@ -84,6 +84,10 @@ WHERE
         self.validate_identity(
             "SELECT a FROM test PIVOT(SUM(x) FOR y IN ('z', 'q')) AS x TABLESAMPLE (0.1)"
         )
+        self.validate_identity(
+            """SELECT GET_PATH(PARSE_JSON('{"y": [{"z": 1}]}'), 'y[0]:z')""",
+            """SELECT GET_PATH(PARSE_JSON('{"y": [{"z": 1}]}'), 'y[0].z')""",
+        )
         self.validate_identity(
             "SELECT p FROM t WHERE p:val NOT IN ('2')",
             "SELECT p FROM t WHERE NOT GET_PATH(p, 'val') IN ('2')",
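The four added lines above exercise Snowflake's colon-style JSON access being normalized to GET_PATH. A minimal sketch of the same behaviour outside the test harness, assuming a sqlglot 21.x install; the expected output string is taken from the validate_identity pair in this hunk:

```python
import sqlglot

# Snowflake colon access is parsed into a JSON path and re-rendered as GET_PATH.
sql = "SELECT p FROM t WHERE p:val NOT IN ('2')"
out = sqlglot.transpile(sql, read="snowflake", write="snowflake")[0]
print(out)  # SELECT p FROM t WHERE NOT GET_PATH(p, 'val') IN ('2')
```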
@@ -118,7 +122,7 @@ WHERE
         )
         self.validate_identity(
             'SELECT v:"fruit" FROM vartab',
-            """SELECT GET_PATH(v, '"fruit"') FROM vartab""",
+            """SELECT GET_PATH(v, 'fruit') FROM vartab""",
         )
         self.validate_identity(
             "v:attr[0]:name",
@@ -249,7 +253,7 @@ WHERE
                 "mysql": """WITH vartab(v) AS (SELECT '[{"attr": [{"name": "banana"}]}]') SELECT JSON_EXTRACT(v, '$[0].attr[0].name') FROM vartab""",
                 "presto": """WITH vartab(v) AS (SELECT JSON_PARSE('[{"attr": [{"name": "banana"}]}]')) SELECT JSON_EXTRACT(v, '$[0].attr[0].name') FROM vartab""",
                 "snowflake": """WITH vartab(v) AS (SELECT PARSE_JSON('[{"attr": [{"name": "banana"}]}]')) SELECT GET_PATH(v, '[0].attr[0].name') FROM vartab""",
-                "tsql": """WITH vartab(v) AS (SELECT '[{"attr": [{"name": "banana"}]}]') SELECT JSON_VALUE(v, '$[0].attr[0].name') FROM vartab""",
+                "tsql": """WITH vartab(v) AS (SELECT '[{"attr": [{"name": "banana"}]}]') SELECT ISNULL(JSON_QUERY(v, '$[0].attr[0].name'), JSON_VALUE(v, '$[0].attr[0].name')) FROM vartab""",
             },
         )
         self.validate_all(
@@ -260,7 +264,7 @@ WHERE
                 "mysql": """WITH vartab(v) AS (SELECT '{"attr": [{"name": "banana"}]}') SELECT JSON_EXTRACT(v, '$.attr[0].name') FROM vartab""",
                 "presto": """WITH vartab(v) AS (SELECT JSON_PARSE('{"attr": [{"name": "banana"}]}')) SELECT JSON_EXTRACT(v, '$.attr[0].name') FROM vartab""",
                 "snowflake": """WITH vartab(v) AS (SELECT PARSE_JSON('{"attr": [{"name": "banana"}]}')) SELECT GET_PATH(v, 'attr[0].name') FROM vartab""",
-                "tsql": """WITH vartab(v) AS (SELECT '{"attr": [{"name": "banana"}]}') SELECT JSON_VALUE(v, '$.attr[0].name') FROM vartab""",
+                "tsql": """WITH vartab(v) AS (SELECT '{"attr": [{"name": "banana"}]}') SELECT ISNULL(JSON_QUERY(v, '$.attr[0].name'), JSON_VALUE(v, '$.attr[0].name')) FROM vartab""",
             },
         )
         self.validate_all(
@@ -271,7 +275,7 @@ WHERE
                 "mysql": """SELECT JSON_EXTRACT('{"fruit":"banana"}', '$.fruit')""",
                 "presto": """SELECT JSON_EXTRACT(JSON_PARSE('{"fruit":"banana"}'), '$.fruit')""",
                 "snowflake": """SELECT GET_PATH(PARSE_JSON('{"fruit":"banana"}'), 'fruit')""",
-                "tsql": """SELECT JSON_VALUE('{"fruit":"banana"}', '$.fruit')""",
+                "tsql": """SELECT ISNULL(JSON_QUERY('{"fruit":"banana"}', '$.fruit'), JSON_VALUE('{"fruit":"banana"}', '$.fruit'))""",
             },
         )
         self.validate_all(
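The three hunks above change the expected T-SQL rendering of JSON extraction from a bare JSON_VALUE call to ISNULL(JSON_QUERY(...), JSON_VALUE(...)), which covers both scalar and non-scalar results. A hedged sketch starting from the Snowflake form shown in the last hunk; the test's actual input SQL is outside this diff, so a round trip may not reproduce the strings byte for byte:

```python
import sqlglot

snowflake_sql = """SELECT GET_PATH(PARSE_JSON('{"fruit":"banana"}'), 'fruit')"""
print(sqlglot.transpile(snowflake_sql, read="snowflake", write="tsql")[0])
# Updated expectation in the hunk:
# SELECT ISNULL(JSON_QUERY('{"fruit":"banana"}', '$.fruit'), JSON_VALUE('{"fruit":"banana"}', '$.fruit'))
```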
@@ -550,7 +554,7 @@ WHERE
             write={
                 "duckdb": """SELECT JSON('{"a": {"b c": "foo"}}') -> '$.a' -> '$."b c"'""",
                 "mysql": """SELECT JSON_EXTRACT(JSON_EXTRACT('{"a": {"b c": "foo"}}', '$.a'), '$."b c"')""",
-                "snowflake": """SELECT GET_PATH(GET_PATH(PARSE_JSON('{"a": {"b c": "foo"}}'), 'a'), '"b c"')""",
+                "snowflake": """SELECT GET_PATH(GET_PATH(PARSE_JSON('{"a": {"b c": "foo"}}'), 'a'), '["b c"]')""",
             },
         )
         self.validate_all(
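Here the expected Snowflake path for a key containing a space moves from the quoted form '"b c"' to the bracket form '["b c"]'. A small round-trip sketch using the updated string from the hunk, assuming sqlglot 21.x; it is meant only to show that the bracket form parses and re-renders in the Snowflake dialect:

```python
import sqlglot

# Key "b c" needs quoting, so the generated GET_PATH path uses bracket notation.
sql = """SELECT GET_PATH(GET_PATH(PARSE_JSON('{"a": {"b c": "foo"}}'), 'a'), '["b c"]')"""
print(sqlglot.parse_one(sql, read="snowflake").sql(dialect="snowflake"))
```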
@@ -744,7 +748,7 @@ WHERE
         self.validate_all(
             r"SELECT FIRST_VALUE(TABLE1.COLUMN1 RESPECT NULLS) OVER (PARTITION BY RANDOM_COLUMN1, RANDOM_COLUMN2 ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS MY_ALIAS FROM TABLE1",
             write={
-                "snowflake": r"SELECT FIRST_VALUE(TABLE1.COLUMN1 RESPECT NULLS) OVER (PARTITION BY RANDOM_COLUMN1, RANDOM_COLUMN2 ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS MY_ALIAS FROM TABLE1"
+                "snowflake": r"SELECT FIRST_VALUE(TABLE1.COLUMN1) RESPECT NULLS OVER (PARTITION BY RANDOM_COLUMN1, RANDOM_COLUMN2 ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS MY_ALIAS FROM TABLE1"
             },
         )
         self.validate_all(
@@ -756,7 +760,7 @@ WHERE
         self.validate_all(
             r"SELECT FIRST_VALUE(TABLE1.COLUMN1 IGNORE NULLS) OVER (PARTITION BY RANDOM_COLUMN1, RANDOM_COLUMN2 ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS MY_ALIAS FROM TABLE1",
             write={
-                "snowflake": r"SELECT FIRST_VALUE(TABLE1.COLUMN1 IGNORE NULLS) OVER (PARTITION BY RANDOM_COLUMN1, RANDOM_COLUMN2 ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS MY_ALIAS FROM TABLE1"
+                "snowflake": r"SELECT FIRST_VALUE(TABLE1.COLUMN1) IGNORE NULLS OVER (PARTITION BY RANDOM_COLUMN1, RANDOM_COLUMN2 ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS MY_ALIAS FROM TABLE1"
             },
         )
         self.validate_all(
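Both window-function hunks move RESPECT NULLS / IGNORE NULLS out of the FIRST_VALUE argument list and after the closing parenthesis, matching Snowflake's documented placement of the modifier. A minimal sketch, with the input and expected strings copied from the second hunk and the assumption that these validate_all calls read the input as Snowflake SQL:

```python
import sqlglot

sql = (
    "SELECT FIRST_VALUE(TABLE1.COLUMN1 IGNORE NULLS) OVER "
    "(PARTITION BY RANDOM_COLUMN1, RANDOM_COLUMN2 ROWS BETWEEN UNBOUNDED PRECEDING "
    "AND UNBOUNDED FOLLOWING) AS MY_ALIAS FROM TABLE1"
)
print(sqlglot.transpile(sql, read="snowflake", write="snowflake")[0])
# Updated expectation: ... FIRST_VALUE(TABLE1.COLUMN1) IGNORE NULLS OVER (...) ...
```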
@@ -1454,12 +1458,6 @@ MATCH_RECOGNIZE (
         )
 
     def test_show(self):
-        # Parsed as Command
-        self.validate_identity(
-            "SHOW TABLES LIKE 'line%' IN tpch.public", check_command_warning=True
-        )
-        self.validate_identity("SHOW TABLES HISTORY IN tpch.public", check_command_warning=True)
-
         # Parsed as Show
         self.validate_identity("SHOW PRIMARY KEYS")
         self.validate_identity("SHOW PRIMARY KEYS IN ACCOUNT")
@@ -1487,6 +1485,22 @@ MATCH_RECOGNIZE (
             "show terse objects in db1.schema1 starts with 'a' limit 10 from 'b'",
             "SHOW TERSE OBJECTS IN SCHEMA db1.schema1 STARTS WITH 'a' LIMIT 10 FROM 'b'",
         )
+        self.validate_identity(
+            "SHOW TABLES LIKE 'line%' IN tpch.public",
+            "SHOW TABLES LIKE 'line%' IN SCHEMA tpch.public",
+        )
+        self.validate_identity(
+            "SHOW TABLES HISTORY IN tpch.public",
+            "SHOW TABLES HISTORY IN SCHEMA tpch.public",
+        )
+        self.validate_identity(
+            "show terse tables in schema db1.schema1 starts with 'a' limit 10 from 'b'",
+            "SHOW TERSE TABLES IN SCHEMA db1.schema1 STARTS WITH 'a' LIMIT 10 FROM 'b'",
+        )
+        self.validate_identity(
+            "show terse tables in db1.schema1 starts with 'a' limit 10 from 'b'",
+            "SHOW TERSE TABLES IN SCHEMA db1.schema1 STARTS WITH 'a' LIMIT 10 FROM 'b'",
+        )
 
         ast = parse_one('SHOW PRIMARY KEYS IN "TEST"."PUBLIC"."customers"', read="snowflake")
         table = ast.find(exp.Table)
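Together with the earlier removal of the check_command_warning assertions, the pairs added here show these SHOW variants now parsing as a proper Show expression and being normalized to an explicit IN SCHEMA scope; the next hunk asserts the scope kind directly. A minimal sketch using a string from this hunk, assuming sqlglot 21.x:

```python
from sqlglot import exp, parse_one

ast = parse_one("SHOW TABLES LIKE 'line%' IN tpch.public", read="snowflake")
assert isinstance(ast, exp.Show)      # parsed as Show, not a bare exp.Command
print(ast.sql(dialect="snowflake"))   # SHOW TABLES LIKE 'line%' IN SCHEMA tpch.public
```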
@@ -1517,6 +1531,11 @@ MATCH_RECOGNIZE (
         table = ast.find(exp.Table)
         self.assertEqual(table.sql(dialect="snowflake"), "db1.schema1")
 
+        ast = parse_one("SHOW TABLES IN db1.schema1", read="snowflake")
+        self.assertEqual(ast.args.get("scope_kind"), "SCHEMA")
+        table = ast.find(exp.Table)
+        self.assertEqual(table.sql(dialect="snowflake"), "db1.schema1")
+
     def test_swap(self):
         ast = parse_one("ALTER TABLE a SWAP WITH b", read="snowflake")
         assert isinstance(ast, exp.AlterTable)