Merging upstream version 20.1.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent d4fe7bdb16
commit 90988d8258

127 changed files with 73384 additions and 73067 deletions
@@ -6,13 +6,38 @@ from tests.dialects.test_dialect import Validator


class TestSnowflake(Validator):
    maxDiff = None
    dialect = "snowflake"

    def test_snowflake(self):
        self.validate_identity("SELECT rename, replace")
        expr = parse_one("SELECT APPROX_TOP_K(C4, 3, 5) FROM t")
        expr.selects[0].assert_is(exp.AggFunc)
        self.assertEqual(expr.sql(dialect="snowflake"), "SELECT APPROX_TOP_K(C4, 3, 5) FROM t")

        self.assertEqual(
            exp.select(exp.Explode(this=exp.column("x")).as_("y", quoted=True)).sql(
                "snowflake", pretty=True
            ),
            """SELECT
  IFF(_u.pos = _u_2.pos_2, _u_2."y", NULL) AS "y"
FROM TABLE(FLATTEN(INPUT => ARRAY_GENERATE_RANGE(0, (
  GREATEST(ARRAY_SIZE(x)) - 1
) + 1))) AS _u(seq, key, path, index, pos, this)
CROSS JOIN TABLE(FLATTEN(INPUT => x)) AS _u_2(seq, key, path, pos_2, "y", this)
WHERE
  _u.pos = _u_2.pos_2
  OR (
    _u.pos > (
      ARRAY_SIZE(x) - 1
    ) AND _u_2.pos_2 = (
      ARRAY_SIZE(x) - 1
    )
  )""",
        )

        self.validate_identity("SELECT user_id, value FROM table_name sample ($s) SEED (0)")
        self.validate_identity("SELECT ARRAY_UNIQUE_AGG(x)")
        self.validate_identity("SELECT OBJECT_CONSTRUCT()")
        self.validate_identity("SELECT DAYOFMONTH(CURRENT_TIMESTAMP())")
        self.validate_identity("SELECT DAYOFYEAR(CURRENT_TIMESTAMP())")
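# Aside (editor's sketch, not part of the commit): the Explode assertion above is the
# interesting one. Snowflake has no EXPLODE/UNNEST projection, so sqlglot emulates it
# with two LATERAL FLATTENs -- one over ARRAY_GENERATE_RANGE to produce positions, one
# over the array itself -- cross-joined and filtered on matching positions so rows stay
# aligned. Reusing only the exp builder calls already present in the test:
from sqlglot import exp

snowflake_sql = exp.select(
    exp.Explode(this=exp.column("x")).as_("y", quoted=True)
).sql("snowflake", pretty=True)
print(snowflake_sql)  # should print the IFF(...)/FLATTEN query asserted above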
@@ -47,6 +72,14 @@ class TestSnowflake(Validator):
        self.validate_identity(
            'DESCRIBE TABLE "SNOWFLAKE_SAMPLE_DATA"."TPCDS_SF100TCL"."WEB_SITE" type=stage'
        )
        self.validate_identity(
            "SELECT * FROM unnest(x) with ordinality",
            "SELECT * FROM TABLE(FLATTEN(INPUT => x)) AS _u(seq, key, path, index, value, this)",
        )
        self.validate_identity(
            "CREATE TABLE foo (ID INT COMMENT $$some comment$$)",
            "CREATE TABLE foo (ID INT COMMENT 'some comment')",
        )
        self.validate_identity(
            "SELECT state, city, SUM(retail_price * quantity) AS gross_revenue FROM sales GROUP BY ALL"
        )
@@ -87,6 +120,21 @@ class TestSnowflake(Validator):
        self.validate_all("CAST(x AS CHAR VARYING)", write={"snowflake": "CAST(x AS VARCHAR)"})
        self.validate_all("CAST(x AS CHARACTER VARYING)", write={"snowflake": "CAST(x AS VARCHAR)"})
        self.validate_all("CAST(x AS NCHAR VARYING)", write={"snowflake": "CAST(x AS VARCHAR)"})
        self.validate_all(
            # We need to qualify the columns in this query because "value" would be ambiguous
            'WITH t(x, "value") AS (SELECT [1, 2, 3], 1) SELECT IFF(_u.pos = _u_2.pos_2, _u_2."value", NULL) AS "value" FROM t, TABLE(FLATTEN(INPUT => ARRAY_GENERATE_RANGE(0, (GREATEST(ARRAY_SIZE(t.x)) - 1) + 1))) AS _u(seq, key, path, index, pos, this) CROSS JOIN TABLE(FLATTEN(INPUT => t.x)) AS _u_2(seq, key, path, pos_2, "value", this) WHERE _u.pos = _u_2.pos_2 OR (_u.pos > (ARRAY_SIZE(t.x) - 1) AND _u_2.pos_2 = (ARRAY_SIZE(t.x) - 1))',
            read={
                "duckdb": 'WITH t(x, "value") AS (SELECT [1,2,3], 1) SELECT UNNEST(t.x) AS "value" FROM t',
            },
        )
        self.validate_all(
            "SELECT { 'Manitoba': 'Winnipeg', 'foo': 'bar' } AS province_capital",
            write={
                "duckdb": "SELECT {'Manitoba': 'Winnipeg', 'foo': 'bar'} AS province_capital",
                "snowflake": "SELECT OBJECT_CONSTRUCT('Manitoba', 'Winnipeg', 'foo', 'bar') AS province_capital",
                "spark": "SELECT STRUCT('Manitoba' AS Winnipeg, 'foo' AS bar) AS province_capital",
            },
        )
        self.validate_all(
            "SELECT COLLATE('B', 'und:ci')",
            write={
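# Aside (editor's sketch, not part of the commit): the duckdb read-fixture above implies
# the same rewrite is reachable through the top-level API; a minimal repro with the
# exact query from the test:
import sqlglot

print(
    sqlglot.transpile(
        'WITH t(x, "value") AS (SELECT [1,2,3], 1) SELECT UNNEST(t.x) AS "value" FROM t',
        read="duckdb",
        write="snowflake",
    )[0]
)  # expected: the qualified IFF(...)/FLATTEN query asserted in the test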
@@ -225,6 +273,7 @@ class TestSnowflake(Validator):
                "spark": "POWER(x, 2)",
                "sqlite": "POWER(x, 2)",
                "starrocks": "POWER(x, 2)",
+               "teradata": "x ** 2",
                "trino": "POWER(x, 2)",
                "tsql": "POWER(x, 2)",
            },
@@ -241,8 +290,8 @@ class TestSnowflake(Validator):
            "DIV0(foo, bar)",
            write={
                "snowflake": "IFF(bar = 0, 0, foo / bar)",
-               "sqlite": "CASE WHEN bar = 0 THEN 0 ELSE foo / bar END",
-               "presto": "IF(bar = 0, 0, foo / bar)",
+               "sqlite": "CASE WHEN bar = 0 THEN 0 ELSE CAST(foo AS REAL) / bar END",
+               "presto": "IF(bar = 0, 0, CAST(foo AS DOUBLE) / bar)",
                "spark": "IF(bar = 0, 0, foo / bar)",
                "hive": "IF(bar = 0, 0, foo / bar)",
                "duckdb": "CASE WHEN bar = 0 THEN 0 ELSE foo / bar END",
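# Aside (editor's sketch, not part of the commit): the reason for the new CASTs in the
# sqlite/presto outputs appears to be integer division -- in both engines "/" truncates
# when both operands are integers, so DIV0(1, 2) would have returned 0 instead of 0.5.
# Hedged repro via the public API:
import sqlglot

print(sqlglot.transpile("SELECT DIV0(foo, bar)", read="snowflake", write="presto")[0])
# expected: SELECT IF(bar = 0, 0, CAST(foo AS DOUBLE) / bar)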
@@ -355,7 +404,7 @@ class TestSnowflake(Validator):
        self.validate_all(
            "SELECT TO_TIMESTAMP(1659981729)",
            write={
-               "bigquery": "SELECT UNIX_TO_TIME(1659981729)",
+               "bigquery": "SELECT TIMESTAMP_SECONDS(1659981729)",
                "snowflake": "SELECT TO_TIMESTAMP(1659981729)",
                "spark": "SELECT CAST(FROM_UNIXTIME(1659981729) AS TIMESTAMP)",
            },
@@ -363,7 +412,7 @@ class TestSnowflake(Validator):
        self.validate_all(
            "SELECT TO_TIMESTAMP(1659981729000, 3)",
            write={
-               "bigquery": "SELECT UNIX_TO_TIME(1659981729000, 'millis')",
+               "bigquery": "SELECT TIMESTAMP_MILLIS(1659981729000)",
                "snowflake": "SELECT TO_TIMESTAMP(1659981729000, 3)",
                "spark": "SELECT TIMESTAMP_MILLIS(1659981729000)",
            },
@@ -371,7 +420,6 @@ class TestSnowflake(Validator):
        self.validate_all(
            "SELECT TO_TIMESTAMP('1659981729')",
            write={
-               "bigquery": "SELECT UNIX_TO_TIME('1659981729')",
                "snowflake": "SELECT TO_TIMESTAMP('1659981729')",
                "spark": "SELECT CAST(FROM_UNIXTIME('1659981729') AS TIMESTAMP)",
            },
@@ -379,9 +427,11 @@ class TestSnowflake(Validator):
        self.validate_all(
            "SELECT TO_TIMESTAMP(1659981729000000000, 9)",
            write={
-               "bigquery": "SELECT UNIX_TO_TIME(1659981729000000000, 'micros')",
+               "bigquery": "SELECT TIMESTAMP_MICROS(CAST(1659981729000000000 / 1000 AS INT64))",
+               "duckdb": "SELECT TO_TIMESTAMP(1659981729000000000 / 1000000000)",
+               "presto": "SELECT FROM_UNIXTIME(CAST(1659981729000000000 AS DOUBLE) / 1000000000)",
                "snowflake": "SELECT TO_TIMESTAMP(1659981729000000000, 9)",
-               "spark": "SELECT TIMESTAMP_MICROS(1659981729000000000)",
+               "spark": "SELECT TIMESTAMP_SECONDS(1659981729000000000 / 1000000000)",
            },
        )
        self.validate_all(
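# Aside (editor's sketch, not part of the commit): TO_TIMESTAMP's second argument is the
# scale of the epoch number (3 = milliseconds, 9 = nanoseconds), which is why the new
# duckdb/presto/spark outputs divide by 1000000000. A plain-Python sanity check of the
# constant used throughout these tests:
from datetime import datetime, timezone

ns = 1659981729000000000
print(datetime.fromtimestamp(ns / 1_000_000_000, tz=timezone.utc))
# 2022-08-08 18:02:09+00:00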
@@ -404,7 +454,6 @@ class TestSnowflake(Validator):
                "spark": "SELECT TO_TIMESTAMP('04/05/2013 01:02:03', 'MM/dd/yyyy HH:mm:ss')",
            },
        )
-
        self.validate_all(
            "SELECT IFF(TRUE, 'true', 'false')",
            write={
@@ -551,6 +600,7 @@ class TestSnowflake(Validator):
            staged_file.sql(dialect="snowflake"),
        )

+       self.validate_identity("SELECT metadata$filename FROM @s1/")
        self.validate_identity("SELECT * FROM @~")
        self.validate_identity("SELECT * FROM @~/some/path/to/file.csv")
        self.validate_identity("SELECT * FROM @mystage")
@@ -610,6 +660,13 @@ class TestSnowflake(Validator):
                "snowflake": "SELECT * FROM testtable SAMPLE BLOCK (0.012) SEED (99992)",
            },
        )
+       self.validate_all(
+           "SELECT * FROM (SELECT * FROM t1 join t2 on t1.a = t2.c) SAMPLE (1)",
+           write={
+               "snowflake": "SELECT * FROM (SELECT * FROM t1 JOIN t2 ON t1.a = t2.c) SAMPLE (1)",
+               "spark": "SELECT * FROM (SELECT * FROM t1 JOIN t2 ON t1.a = t2.c) SAMPLE (1 PERCENT)",
+           },
+       )

    def test_timestamps(self):
        self.validate_identity("SELECT CAST('12:00:00' AS TIME)")
@@ -719,6 +776,17 @@ class TestSnowflake(Validator):
        )

    def test_ddl(self):
+       self.validate_identity(
+           """create external table et2(
+             col1 date as (parse_json(metadata$external_table_partition):COL1::date),
+             col2 varchar as (parse_json(metadata$external_table_partition):COL2::varchar),
+             col3 number as (parse_json(metadata$external_table_partition):COL3::number))
+           partition by (col1,col2,col3)
+           location=@s2/logs/
+           partition_type = user_specified
+           file_format = (type = parquet)""",
+           "CREATE EXTERNAL TABLE et2 (col1 DATE AS (CAST(PARSE_JSON(metadata$external_table_partition)['COL1'] AS DATE)), col2 VARCHAR AS (CAST(PARSE_JSON(metadata$external_table_partition)['COL2'] AS VARCHAR)), col3 DECIMAL AS (CAST(PARSE_JSON(metadata$external_table_partition)['COL3'] AS DECIMAL))) LOCATION @s2/logs/ PARTITION BY (col1, col2, col3) partition_type=user_specified file_format=(type = parquet)",
+       )
        self.validate_identity("CREATE OR REPLACE VIEW foo (uid) COPY GRANTS AS (SELECT 1)")
        self.validate_identity("CREATE TABLE geospatial_table (id INT, g GEOGRAPHY)")
        self.validate_identity("CREATE MATERIALIZED VIEW a COMMENT='...' AS SELECT 1 FROM x")
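# Aside (editor's sketch, not part of the commit): the expected DDL above shows two
# normalizations -- Snowflake's colon JSON-path syntax becomes bracket access and the
# "::type" suffix becomes CAST. A hedged one-liner repro of just that piece:
import sqlglot

print(sqlglot.transpile("SELECT parse_json(y):COL1::date", read="snowflake", write="snowflake")[0])
# expected: SELECT CAST(PARSE_JSON(y)['COL1'] AS DATE)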
@@ -733,7 +801,7 @@ class TestSnowflake(Validator):
            "CREATE TABLE orders_clone_restore CLONE orders BEFORE (STATEMENT => '8e5d0ca9-005e-44e6-b858-a8f5b37c5726')"
        )
        self.validate_identity(
-           "CREATE TABLE a (x DATE, y BIGINT) WITH (PARTITION BY (x), integration='q', auto_refresh=TRUE, file_format=(type = parquet))"
+           "CREATE TABLE a (x DATE, y BIGINT) PARTITION BY (x) integration='q' auto_refresh=TRUE file_format=(type = parquet)"
        )
        self.validate_identity(
            "CREATE SCHEMA mytestschema_clone_restore CLONE testschema BEFORE (TIMESTAMP => TO_TIMESTAMP(40 * 365 * 86400))"
@@ -1179,3 +1247,39 @@ MATCH_RECOGNIZE (
        ast = parse_one("ALTER TABLE a SWAP WITH b", read="snowflake")
        assert isinstance(ast, exp.AlterTable)
        assert isinstance(ast.args["actions"][0], exp.SwapTable)
+
+   def test_try_cast(self):
+       self.validate_identity("SELECT TRY_CAST(x AS DOUBLE)")
+
+       self.validate_all("TRY_CAST('foo' AS TEXT)", read={"hive": "CAST('foo' AS STRING)"})
+       self.validate_all("CAST(5 + 5 AS TEXT)", read={"hive": "CAST(5 + 5 AS STRING)"})
+       self.validate_all(
+           "CAST(TRY_CAST('2020-01-01' AS DATE) AS TEXT)",
+           read={
+               "hive": "CAST(CAST('2020-01-01' AS DATE) AS STRING)",
+               "snowflake": "CAST(TRY_CAST('2020-01-01' AS DATE) AS TEXT)",
+           },
+       )
+       self.validate_all(
+           "TRY_CAST(x AS TEXT)",
+           read={
+               "hive": "CAST(x AS STRING)",
+               "snowflake": "TRY_CAST(x AS TEXT)",
+           },
+       )
+
+       from sqlglot.optimizer.annotate_types import annotate_types
+
+       expression = parse_one("SELECT CAST(t.x AS STRING) FROM t", read="hive")
+
+       expression = annotate_types(expression, schema={"t": {"x": "string"}})
+       self.assertEqual(expression.sql(dialect="snowflake"), "SELECT TRY_CAST(t.x AS TEXT) FROM t")
+
+       expression = annotate_types(expression, schema={"t": {"x": "int"}})
+       self.assertEqual(expression.sql(dialect="snowflake"), "SELECT CAST(t.x AS TEXT) FROM t")
+
+       # We can't infer FOO's type since it's a UDF in this case, so we don't get rid of TRY_CAST
+       expression = parse_one("SELECT TRY_CAST(FOO() AS TEXT)", read="snowflake")
+
+       expression = annotate_types(expression)
+       self.assertEqual(expression.sql(dialect="snowflake"), "SELECT TRY_CAST(FOO() AS TEXT)")
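# Aside (editor's sketch, not part of the commit): the rule test_try_cast exercises is
# that Snowflake's TRY_CAST only accepts string inputs, so Hive's CAST(x AS STRING) may
# only transpile to TRY_CAST once annotate_types proves x is itself a string. Minimal
# repro reusing only calls from the test above:
from sqlglot import parse_one
from sqlglot.optimizer.annotate_types import annotate_types

e = parse_one("SELECT CAST(t.x AS STRING) FROM t", read="hive")
e = annotate_types(e, schema={"t": {"x": "string"}})
print(e.sql(dialect="snowflake"))  # SELECT TRY_CAST(t.x AS TEXT) FROM t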