Adding upstream version 25.32.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent ec2e441f55
commit 24751d63a1
74 changed files with 2284 additions and 1814 deletions
@@ -1640,6 +1640,11 @@ WHERE
             },
         )

+        self.validate_identity(
+            "SELECT * FROM ML.FEATURES_AT_TIME(TABLE mydataset.feature_table, time => '2022-06-11 10:00:00+00', num_rows => 1, ignore_feature_nulls => TRUE)"
+        )
+        self.validate_identity("SELECT * FROM ML.FEATURES_AT_TIME((SELECT 1), num_rows => 1)")
+
     def test_errors(self):
         with self.assertRaises(TokenError):
             transpile("'\\'", read="bigquery")
@@ -2145,27 +2150,37 @@ OPTIONS (
                     },
                 )

-                self.validate_all(
-                    f"""SELECT {func}('{{"name": "Jakob", "age": "6"}}', '$.age')""",
-                    write={
-                        "bigquery": f"""SELECT {func}('{{"name": "Jakob", "age": "6"}}', '$.age')""",
-                        "duckdb": """SELECT '{"name": "Jakob", "age": "6"}' ->> '$.age'""",
-                        "snowflake": """SELECT JSON_EXTRACT_PATH_TEXT('{"name": "Jakob", "age": "6"}', 'age')""",
-                    },
-                )
+                sql = f"""SELECT {func}('{{"name": "Jakob", "age": "6"}}', '$.age')"""
+                self.validate_all(
+                    sql,
+                    write={
+                        "bigquery": sql,
+                        "duckdb": """SELECT '{"name": "Jakob", "age": "6"}' ->> '$.age'""",
+                        "snowflake": """SELECT JSON_EXTRACT_PATH_TEXT('{"name": "Jakob", "age": "6"}', 'age')""",
+                    },
+                )
+
+                self.assertEqual(
+                    self.parse_one(sql).sql("bigquery", normalize_functions="upper"), sql
+                )

     def test_json_extract_array(self):
         for func in ("JSON_QUERY_ARRAY", "JSON_EXTRACT_ARRAY"):
             with self.subTest(f"Testing BigQuery's {func}"):
+                sql = f"""SELECT {func}('{{"fruits": [1, "oranges"]}}', '$.fruits')"""
                 self.validate_all(
-                    f"""SELECT {func}('{{"fruits": [1, "oranges"]}}', '$.fruits')""",
+                    sql,
                     write={
-                        "bigquery": f"""SELECT {func}('{{"fruits": [1, "oranges"]}}', '$.fruits')""",
+                        "bigquery": sql,
                         "duckdb": """SELECT CAST('{"fruits": [1, "oranges"]}' -> '$.fruits' AS JSON[])""",
                         "snowflake": """SELECT TRANSFORM(GET_PATH(PARSE_JSON('{"fruits": [1, "oranges"]}'), 'fruits'), x -> PARSE_JSON(TO_JSON(x)))""",
                     },
                 )
+
+                self.assertEqual(
+                    self.parse_one(sql).sql("bigquery", normalize_functions="upper"), sql
+                )

     def test_unix_seconds(self):
         self.validate_all(
             "SELECT UNIX_SECONDS('2008-12-25 15:30:00+00')",
@@ -2854,6 +2854,13 @@ FROM subquery2""",
             },
         )

+        self.validate_all(
+            "SELECT ARRAY_LENGTH(GENERATE_DATE_ARRAY(DATE '2020-01-01', DATE '2020-02-01', INTERVAL 1 WEEK))",
+            write={
+                "snowflake": "SELECT ARRAY_SIZE((SELECT ARRAY_AGG(*) FROM (SELECT DATEADD(WEEK, CAST(value AS INT), CAST('2020-01-01' AS DATE)) AS value FROM TABLE(FLATTEN(INPUT => ARRAY_GENERATE_RANGE(0, (DATEDIFF(WEEK, CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE)) + 1 - 1) + 1))) AS _u(seq, key, path, index, value, this))))",
+            },
+        )
+
     def test_set_operation_specifiers(self):
         self.validate_all(
             "SELECT 1 EXCEPT ALL SELECT 1",
@@ -379,10 +379,6 @@ class TestDuckDB(Validator):
             "JSON_EXTRACT_PATH_TEXT(x, '$.family')",
             "x ->> '$.family'",
         )
-        self.validate_identity(
-            "ATTACH DATABASE ':memory:' AS new_database", check_command_warning=True
-        )
-        self.validate_identity("DETACH DATABASE new_database", check_command_warning=True)
         self.validate_identity(
             "SELECT {'yes': 'duck', 'maybe': 'goose', 'huh': NULL, 'no': 'heron'}"
         )
@@ -1392,3 +1388,20 @@ class TestDuckDB(Validator):
             else:
                 self.assertEqual(ignore_null.sql("duckdb"), func.sql("duckdb"))
                 self.assertNotIn("IGNORE NULLS", windowed_ignore_null.sql("duckdb"))
+
+    def test_attach_detach(self):
+        # ATTACH
+        self.validate_identity("ATTACH 'file.db'")
+        self.validate_identity("ATTACH ':memory:' AS db_alias")
+        self.validate_identity("ATTACH IF NOT EXISTS 'file.db' AS db_alias")
+        self.validate_identity("ATTACH 'file.db' AS db_alias (READ_ONLY)")
+        self.validate_identity("ATTACH 'file.db' (READ_ONLY FALSE, TYPE sqlite)")
+        self.validate_identity("ATTACH 'file.db' (TYPE POSTGRES, SCHEMA 'public')")
+
+        self.validate_identity("ATTACH DATABASE 'file.db'", "ATTACH 'file.db'")
+
+        # DETACH
+        self.validate_identity("DETACH new_database")
+        self.validate_identity("DETACH IF EXISTS file")
+
+        self.validate_identity("DETACH DATABASE db", "DETACH db")
@@ -708,6 +708,16 @@ class TestMySQL(Validator):
         )

     def test_mysql(self):
+        for func in ("CHAR_LENGTH", "CHARACTER_LENGTH"):
+            with self.subTest(f"Testing MySQL's {func}"):
+                self.validate_all(
+                    f"SELECT {func}('foo')",
+                    write={
+                        "duckdb": "SELECT LENGTH('foo')",
+                        "mysql": "SELECT CHAR_LENGTH('foo')",
+                    },
+                )
+
         self.validate_all(
             "SELECT CONCAT('11', '22')",
             read={
@@ -1319,3 +1329,6 @@ COMMENT='客户账户表'"""
         expression = self.parse_one("EXPLAIN ANALYZE SELECT * FROM t")
         self.assertIsInstance(expression, exp.Describe)
         self.assertEqual(expression.text("style"), "ANALYZE")
+
+        for format in ("JSON", "TRADITIONAL", "TREE"):
+            self.validate_identity(f"DESCRIBE FORMAT={format} UPDATE test SET test_col = 'abc'")
@@ -287,6 +287,17 @@ class TestOracle(Validator):
                 "clickhouse": "TRIM(BOTH 'h' FROM 'Hello World')",
             },
         )
+        self.validate_identity(
+            "SELECT /*+ ORDERED */* FROM tbl", "SELECT /*+ ORDERED */ * FROM tbl"
+        )
+        self.validate_identity(
+            "SELECT /* test */ /*+ ORDERED */* FROM tbl",
+            "/* test */ SELECT /*+ ORDERED */ * FROM tbl",
+        )
+        self.validate_identity(
+            "SELECT /*+ ORDERED */*/* test */ FROM tbl",
+            "SELECT /*+ ORDERED */ * /* test */ FROM tbl",
+        )

     def test_join_marker(self):
         self.validate_identity("SELECT e1.x, e2.x FROM e e1, e e2 WHERE e1.y (+) = e2.y")
@@ -127,10 +127,6 @@ class TestPostgres(Validator):
             "pg_catalog.PG_TABLE_IS_VISIBLE(c.oid) "
             "ORDER BY 2, 3"
         )
-        self.validate_identity(
-            "/*+ some comment*/ SELECT b.foo, b.bar FROM baz AS b",
-            "/* + some comment */ SELECT b.foo, b.bar FROM baz AS b",
-        )
         self.validate_identity(
             "SELECT ARRAY[1, 2, 3] <@ ARRAY[1, 2]",
             "SELECT ARRAY[1, 2] @> ARRAY[1, 2, 3]",
@@ -819,6 +815,11 @@
             },
         )

+        self.validate_identity(
+            "/*+ some comment*/ SELECT b.foo, b.bar FROM baz AS b",
+            "/* + some comment */ SELECT b.foo, b.bar FROM baz AS b",
+        )
+
     def test_ddl(self):
         # Checks that user-defined types are parsed into DataType instead of Identifier
         self.parse_one("CREATE TABLE t (a udt)").this.expressions[0].args["kind"].assert_is(
@@ -7,6 +7,12 @@ class TestPresto(Validator):
     dialect = "presto"

     def test_cast(self):
+        self.validate_identity("DEALLOCATE PREPARE my_query", check_command_warning=True)
+        self.validate_identity("DESCRIBE INPUT x", check_command_warning=True)
+        self.validate_identity("DESCRIBE OUTPUT x", check_command_warning=True)
+        self.validate_identity(
+            "RESET SESSION hive.optimized_reader_enabled", check_command_warning=True
+        )
         self.validate_identity("SELECT * FROM x qualify", "SELECT * FROM x AS qualify")
         self.validate_identity("CAST(x AS IPADDRESS)")
         self.validate_identity("CAST(x AS IPPREFIX)")
@@ -722,7 +728,7 @@
             "SELECT MIN_BY(a.id, a.timestamp, 3) FROM a",
             write={
                 "clickhouse": "SELECT argMin(a.id, a.timestamp) FROM a",
-                "duckdb": "SELECT ARG_MIN(a.id, a.timestamp) FROM a",
+                "duckdb": "SELECT ARG_MIN(a.id, a.timestamp, 3) FROM a",
                 "presto": "SELECT MIN_BY(a.id, a.timestamp, 3) FROM a",
                 "snowflake": "SELECT MIN_BY(a.id, a.timestamp, 3) FROM a",
                 "spark": "SELECT MIN_BY(a.id, a.timestamp) FROM a",
@@ -12,3 +12,12 @@ class TestRisingWave(Validator):
                 "": "SELECT a FROM tbl FOR UPDATE",
             },
         )
+        self.validate_identity(
+            "CREATE SOURCE from_kafka (*, gen_i32_field INT AS int32_field + 2, gen_i64_field INT AS int64_field + 2, WATERMARK FOR time_col AS time_col - INTERVAL '5 SECOND') INCLUDE header foo VARCHAR AS myheader INCLUDE key AS mykey WITH (connector='kafka', topic='my_topic') FORMAT PLAIN ENCODE PROTOBUF (A=1, B=2) KEY ENCODE PROTOBUF (A=3, B=4)"
+        )
+        self.validate_identity(
+            "CREATE SINK my_sink AS SELECT * FROM A WITH (connector='kafka', topic='my_topic') FORMAT PLAIN ENCODE PROTOBUF (A=1, B=2) KEY ENCODE PROTOBUF (A=3, B=4)"
+        )
+        self.validate_identity(
+            "WITH t1 AS MATERIALIZED (SELECT 1), t2 AS NOT MATERIALIZED (SELECT 2) SELECT * FROM t1, t2"
+        )
@@ -1479,13 +1479,20 @@ WHERE
                 "snowflake": "CREATE OR REPLACE TRANSIENT TABLE a (id INT)",
             },
         )

        self.validate_all(
            "CREATE TABLE a (b INT)",
            read={"teradata": "CREATE MULTISET TABLE a (b INT)"},
            write={"snowflake": "CREATE TABLE a (b INT)"},
        )
+        self.validate_identity("CREATE TABLE a TAG (key1='value_1', key2='value_2')")
+        self.validate_all(
+            "CREATE TABLE a TAG (key1='value_1')",
+            read={
+                "snowflake": "CREATE TABLE a WITH TAG (key1='value_1')",
+            },
+        )

        for action in ("SET", "DROP"):
            with self.subTest(f"ALTER COLUMN {action} NOT NULL"):
                self.validate_all(
@@ -2250,3 +2257,13 @@ SINGLE = TRUE""",
         self.validate_identity(
             "GRANT ALL PRIVILEGES ON FUNCTION mydb.myschema.ADD5(number) TO ROLE analyst"
         )
+
+    def test_window_function_arg(self):
+        query = "SELECT * FROM TABLE(db.schema.FUNC(a) OVER ())"
+
+        ast = self.parse_one(query)
+        window = ast.find(exp.Window)
+
+        self.assertEqual(ast.sql("snowflake"), query)
+        self.assertEqual(len(list(ast.find_all(exp.Column))), 1)
+        self.assertEqual(window.this.sql("snowflake"), "db.schema.FUNC(a)")
@@ -1308,6 +1308,12 @@ WHERE
             },
         )

+        for fmt in ("WEEK", "WW", "WK"):
+            self.validate_identity(
+                f"SELECT DATEPART({fmt}, '2024-11-21')",
+                "SELECT DATEPART(WK, CAST('2024-11-21' AS DATETIME2))",
+            )
+
     def test_convert(self):
         self.validate_all(
             "CONVERT(NVARCHAR(200), x)",