Adding upstream version 25.5.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent 147b6e06e8 · commit 4e506fbac7
136 changed files with 80990 additions and 72541 deletions

tests/dialects/test_snowflake.py
@@ -11,6 +11,12 @@ class TestSnowflake(Validator):
     dialect = "snowflake"
 
     def test_snowflake(self):
+        self.assertEqual(
+            # Ensures we don't fail when generating ParseJSON with the `safe` arg set to `True`
+            self.validate_identity("""SELECT TRY_PARSE_JSON('{"x: 1}')""").sql(),
+            """SELECT PARSE_JSON('{"x: 1}')""",
+        )
+
         self.validate_identity(
             "transform(x, a int -> a + a + 1)",
             "TRANSFORM(x, a -> CAST(a AS INT) + CAST(a AS INT) + 1)",
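A minimal sketch of the behavior the new assertion pins down, assuming the public parse_one/sql API at this sqlglot version:

import sqlglot

# Parsed as Snowflake, TRY_PARSE_JSON becomes a ParseJSON node with safe=True;
# generating without a target dialect falls back to plain PARSE_JSON.
expr = sqlglot.parse_one("""SELECT TRY_PARSE_JSON('{"x: 1}')""", read="snowflake")
print(expr.sql())                     # SELECT PARSE_JSON('{"x: 1}')
print(expr.sql(dialect="snowflake"))  # SELECT TRY_PARSE_JSON('{"x: 1}')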
@@ -49,6 +55,8 @@ WHERE
 )""",
         )
 
+        self.validate_identity("SELECT CAST([1, 2, 3] AS VECTOR(FLOAT, 3))")
+        self.validate_identity("SELECT CONNECT_BY_ROOT test AS test_column_alias")
         self.validate_identity("SELECT number").selects[0].assert_is(exp.Column)
         self.validate_identity("INTERVAL '4 years, 5 months, 3 hours'")
         self.validate_identity("ALTER TABLE table1 CLUSTER BY (name DESC)")
@@ -182,18 +190,6 @@ WHERE
             """SELECT PARSE_JSON('{"food":{"fruit":"banana"}}'):food.fruit::VARCHAR""",
             """SELECT CAST(GET_PATH(PARSE_JSON('{"food":{"fruit":"banana"}}'), 'food.fruit') AS VARCHAR)""",
         )
-        self.validate_identity(
-            "SELECT * FROM foo at",
-            "SELECT * FROM foo AS at",
-        )
-        self.validate_identity(
-            "SELECT * FROM foo before",
-            "SELECT * FROM foo AS before",
-        )
-        self.validate_identity(
-            "SELECT * FROM foo at (col)",
-            "SELECT * FROM foo AS at(col)",
-        )
         self.validate_identity(
             "SELECT * FROM unnest(x) with ordinality",
             "SELECT * FROM TABLE(FLATTEN(INPUT => x)) AS _u(seq, key, path, index, value, this)",
@@ -337,7 +333,7 @@ WHERE
             """SELECT PARSE_JSON('{"fruit":"banana"}'):fruit""",
             write={
                 "bigquery": """SELECT JSON_EXTRACT(PARSE_JSON('{"fruit":"banana"}'), '$.fruit')""",
-                "databricks": """SELECT GET_JSON_OBJECT('{"fruit":"banana"}', '$.fruit')""",
+                "databricks": """SELECT '{"fruit":"banana"}':fruit""",
                 "duckdb": """SELECT JSON('{"fruit":"banana"}') -> '$.fruit'""",
                 "mysql": """SELECT JSON_EXTRACT('{"fruit":"banana"}', '$.fruit')""",
                 "presto": """SELECT JSON_EXTRACT(JSON_PARSE('{"fruit":"banana"}'), '$.fruit')""",
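A minimal sketch of the retargeted Databricks output above, assuming sqlglot's transpile API:

import sqlglot

# The Databricks writer now keeps Snowflake's colon path syntax instead of
# rewriting the lookup to GET_JSON_OBJECT.
print(sqlglot.transpile(
    """SELECT PARSE_JSON('{"fruit":"banana"}'):fruit""",
    read="snowflake",
    write="databricks",
)[0])  # SELECT '{"fruit":"banana"}':fruit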
@@ -1196,6 +1192,17 @@ WHERE
             "SELECT oldt.*, newt.* FROM my_table BEFORE (STATEMENT => '8e5d0ca9-005e-44e6-b858-a8f5b37c5726') AS oldt FULL OUTER JOIN my_table AT (STATEMENT => '8e5d0ca9-005e-44e6-b858-a8f5b37c5726') AS newt ON oldt.id = newt.id WHERE oldt.id IS NULL OR newt.id IS NULL",
         )
 
+        # Make sure that the historical data keywords can still be used as aliases
+        for historical_data_prefix in ("AT", "BEFORE", "END", "CHANGES"):
+            for schema_suffix in ("", "(col)"):
+                with self.subTest(
+                    f"Testing historical data prefix alias: {historical_data_prefix}{schema_suffix}"
+                ):
+                    self.validate_identity(
+                        f"SELECT * FROM foo {historical_data_prefix}{schema_suffix}",
+                        f"SELECT * FROM foo AS {historical_data_prefix}{schema_suffix}",
+                    )
+
     def test_ddl(self):
         for constraint_prefix in ("WITH ", ""):
             with self.subTest(f"Constraint prefix: {constraint_prefix}"):
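A minimal sketch of what the new alias loop asserts, assuming sqlglot's transpile API (write defaults to the read dialect):

import sqlglot

# With no time-travel clause following, the historical data keywords parse
# as ordinary table aliases and are generated with an explicit AS.
for alias in ("AT", "BEFORE", "END", "CHANGES"):
    assert (
        sqlglot.transpile(f"SELECT * FROM foo {alias}", read="snowflake")[0]
        == f"SELECT * FROM foo AS {alias}"
    )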
@@ -1216,6 +1223,7 @@ WHERE
             "CREATE TABLE t (id INT TAG (key1='value_1', key2='value_2'))",
         )
 
+        self.validate_identity("CREATE SECURE VIEW table1 AS (SELECT a FROM table2)")
         self.validate_identity(
             """create external table et2(
   col1 date as (parse_json(metadata$external_table_partition):COL1::date),
@@ -1240,6 +1248,9 @@ WHERE
         self.validate_identity(
             "CREATE OR REPLACE TAG IF NOT EXISTS cost_center COMMENT='cost_center tag'"
         ).this.assert_is(exp.Identifier)
+        self.validate_identity(
+            "CREATE DYNAMIC TABLE product (pre_tax_profit, taxes, after_tax_profit) TARGET_LAG='20 minutes' WAREHOUSE=mywh AS SELECT revenue - cost, (revenue - cost) * tax_rate, (revenue - cost) * (1.0 - tax_rate) FROM staging_table"
+        )
         self.validate_identity(
             "ALTER TABLE db_name.schmaName.tblName ADD COLUMN COLUMN_1 VARCHAR NOT NULL TAG (key1='value_1')"
         )
@ -2021,3 +2032,15 @@ SINGLE = TRUE""",
|
|||
|
||||
self.validate_identity("ALTER TABLE foo UNSET TAG a, b, c")
|
||||
self.validate_identity("ALTER TABLE foo UNSET DATA_RETENTION_TIME_IN_DAYS, CHANGE_TRACKING")
|
||||
|
||||
def test_from_changes(self):
|
||||
self.validate_identity(
|
||||
"""SELECT C1 FROM t1 CHANGES (INFORMATION => APPEND_ONLY) AT (STREAM => 's1') END (TIMESTAMP => $ts2)"""
|
||||
)
|
||||
self.validate_identity(
|
||||
"""SELECT C1 FROM t1 CHANGES (INFORMATION => APPEND_ONLY) BEFORE (STATEMENT => 'STMT_ID') END (TIMESTAMP => $ts2)"""
|
||||
)
|
||||
self.validate_identity(
|
||||
"""SELECT 1 FROM some_table CHANGES (INFORMATION => APPEND_ONLY) AT (TIMESTAMP => TO_TIMESTAMP_TZ('2024-07-01 00:00:00+00:00')) END (TIMESTAMP => TO_TIMESTAMP_TZ('2024-07-01 14:28:59.999999+00:00'))""",
|
||||
"""SELECT 1 FROM some_table CHANGES (INFORMATION => APPEND_ONLY) AT (TIMESTAMP => CAST('2024-07-01 00:00:00+00:00' AS TIMESTAMPTZ)) END (TIMESTAMP => CAST('2024-07-01 14:28:59.999999+00:00' AS TIMESTAMPTZ))""",
|
||||
)
|
||||
|
|
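A minimal sketch of the CHANGES normalization the last assertion covers, assuming sqlglot's transpile API:

import sqlglot

# On a Snowflake-to-Snowflake round trip, TO_TIMESTAMP_TZ calls inside the
# CHANGES clause are rewritten to CASTs, per the expected output above.
sql = (
    "SELECT 1 FROM some_table CHANGES (INFORMATION => APPEND_ONLY) "
    "AT (TIMESTAMP => TO_TIMESTAMP_TZ('2024-07-01 00:00:00+00:00'))"
)
print(sqlglot.transpile(sql, read="snowflake")[0])
# ... AT (TIMESTAMP => CAST('2024-07-01 00:00:00+00:00' AS TIMESTAMPTZ))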