Adding upstream version 23.16.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:

parent 9d7e0ff7aa
commit b6ae88ec81
93 changed files with 64106 additions and 59061 deletions
tests/dialects/test_snowflake.py

@@ -10,14 +10,14 @@ class TestSnowflake(Validator):
    dialect = "snowflake"

    def test_snowflake(self):
        self.validate_identity(
            "MERGE INTO my_db AS ids USING (SELECT new_id FROM my_model WHERE NOT col IS NULL) AS new_ids ON ids.type = new_ids.type AND ids.source = new_ids.source WHEN NOT MATCHED THEN INSERT VALUES (new_ids.new_id)"
        self.validate_all(
            "ARRAY_CONSTRUCT_COMPACT(1, null, 2)",
            write={
                "spark": "ARRAY_COMPACT(ARRAY(1, NULL, 2))",
                "snowflake": "ARRAY_CONSTRUCT_COMPACT(1, NULL, 2)",
            },
        )
        self.validate_identity("ALTER TABLE table1 CLUSTER BY (name DESC)")
        self.validate_identity(
            "INSERT OVERWRITE TABLE t SELECT 1", "INSERT OVERWRITE INTO t SELECT 1"
        )
        self.validate_identity("SELECT rename, replace")

        expr = parse_one("SELECT APPROX_TOP_K(C4, 3, 5) FROM t")
        expr.selects[0].assert_is(exp.AggFunc)
        self.assertEqual(expr.sql(dialect="snowflake"), "SELECT APPROX_TOP_K(C4, 3, 5) FROM t")

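For orientation, these assertions come from sqlglot's dialect test harness: validate_identity round-trips a statement through one dialect, validate_all checks read/write mappings across dialects, and parse_one returns an inspectable expression tree. A minimal sketch of the same checks against the public API (assuming sqlglot 23.16.0 is installed; expected strings are copied from the tests above):

import sqlglot
from sqlglot import exp, parse_one

# Round-trip through the Snowflake dialect (what validate_identity asserts).
sql = "ARRAY_CONSTRUCT_COMPACT(1, NULL, 2)"
assert sqlglot.transpile(sql, read="snowflake", write="snowflake")[0] == sql

# Cross-dialect generation (what a write= mapping asserts).
assert (
    sqlglot.transpile("ARRAY_CONSTRUCT_COMPACT(1, null, 2)", read="snowflake", write="spark")[0]
    == "ARRAY_COMPACT(ARRAY(1, NULL, 2))"
)

# parse_one yields an expression tree that can be inspected and re-rendered.
expr = parse_one("SELECT APPROX_TOP_K(C4, 3, 5) FROM t", read="snowflake")
assert isinstance(expr.selects[0], exp.AggFunc)
assert expr.sql(dialect="snowflake") == "SELECT APPROX_TOP_K(C4, 3, 5) FROM t"
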
@@ -43,6 +43,8 @@ WHERE
)""",
        )

        self.validate_identity("ALTER TABLE table1 CLUSTER BY (name DESC)")
        self.validate_identity("SELECT rename, replace")
        self.validate_identity("SELECT TIMEADD(HOUR, 2, CAST('09:05:03' AS TIME))")
        self.validate_identity("SELECT CAST(OBJECT_CONSTRUCT('a', 1) AS MAP(VARCHAR, INT))")
        self.validate_identity("SELECT CAST(OBJECT_CONSTRUCT('a', 1) AS OBJECT(a CHAR NOT NULL))")

@@ -86,20 +88,19 @@ WHERE
        self.validate_identity("a$b")  # valid snowflake identifier
        self.validate_identity("SELECT REGEXP_LIKE(a, b, c)")
        self.validate_identity("CREATE TABLE foo (bar FLOAT AUTOINCREMENT START 0 INCREMENT 1)")
        self.validate_identity("ALTER TABLE IF EXISTS foo SET TAG a = 'a', b = 'b', c = 'c'")
        self.validate_identity("ALTER TABLE foo UNSET TAG a, b, c")
        self.validate_identity("ALTER TABLE foo SET COMMENT = 'bar'")
        self.validate_identity("ALTER TABLE foo SET CHANGE_TRACKING = FALSE")
        self.validate_identity("ALTER TABLE foo UNSET DATA_RETENTION_TIME_IN_DAYS, CHANGE_TRACKING")
        self.validate_identity("COMMENT IF EXISTS ON TABLE foo IS 'bar'")
        self.validate_identity("SELECT CONVERT_TIMEZONE('UTC', 'America/Los_Angeles', col)")
        self.validate_identity("ALTER TABLE a SWAP WITH b")
        self.validate_identity("SELECT MATCH_CONDITION")
        self.validate_identity("SELECT * REPLACE (CAST(col AS TEXT) AS scol) FROM t")
        self.validate_identity(
            'DESCRIBE TABLE "SNOWFLAKE_SAMPLE_DATA"."TPCDS_SF100TCL"."WEB_SITE" type=stage'
            "MERGE INTO my_db AS ids USING (SELECT new_id FROM my_model WHERE NOT col IS NULL) AS new_ids ON ids.type = new_ids.type AND ids.source = new_ids.source WHEN NOT MATCHED THEN INSERT VALUES (new_ids.new_id)"
        )
        self.validate_identity(
            "SELECT a FROM test PIVOT(SUM(x) FOR y IN ('z', 'q')) AS x TABLESAMPLE (0.1)"
            "INSERT OVERWRITE TABLE t SELECT 1", "INSERT OVERWRITE INTO t SELECT 1"
        )
        self.validate_identity(
            'DESCRIBE TABLE "SNOWFLAKE_SAMPLE_DATA"."TPCDS_SF100TCL"."WEB_SITE" type=stage'
        )
        self.validate_identity(
            "SELECT * FROM DATA AS DATA_L ASOF JOIN DATA AS DATA_R MATCH_CONDITION (DATA_L.VAL > DATA_R.VAL) ON DATA_L.ID = DATA_R.ID"

@@ -145,10 +146,6 @@ WHERE
            "SELECT TIMESTAMPFROMPARTS(d, t)",
            "SELECT TIMESTAMP_FROM_PARTS(d, t)",
        )
        self.validate_identity(
            "SELECT user_id, value FROM table_name SAMPLE ($s) SEED (0)",
            "SELECT user_id, value FROM table_name TABLESAMPLE ($s) SEED (0)",
        )
        self.validate_identity(
            "SELECT v:attr[0].name FROM vartab",
            "SELECT GET_PATH(v, 'attr[0].name') FROM vartab",

@@ -236,6 +233,38 @@ WHERE
            "CAST(x AS NCHAR VARYING)",
            "CAST(x AS VARCHAR)",
        )
        self.validate_identity(
            "CREATE OR REPLACE TEMPORARY TABLE x (y NUMBER IDENTITY(0, 1))",
            "CREATE OR REPLACE TEMPORARY TABLE x (y DECIMAL(38, 0) AUTOINCREMENT START 0 INCREMENT 1)",
        )
        self.validate_identity(
            "CREATE TEMPORARY TABLE x (y NUMBER AUTOINCREMENT(0, 1))",
            "CREATE TEMPORARY TABLE x (y DECIMAL(38, 0) AUTOINCREMENT START 0 INCREMENT 1)",
        )
        self.validate_identity(
            "CREATE TABLE x (y NUMBER IDENTITY START 0 INCREMENT 1)",
            "CREATE TABLE x (y DECIMAL(38, 0) AUTOINCREMENT START 0 INCREMENT 1)",
        )
        self.validate_identity(
            "ALTER TABLE foo ADD COLUMN id INT identity(1, 1)",
            "ALTER TABLE foo ADD COLUMN id INT AUTOINCREMENT START 1 INCREMENT 1",
        )
        self.validate_identity(
            "SELECT DAYOFWEEK('2016-01-02T23:39:20.123-07:00'::TIMESTAMP)",
            "SELECT DAYOFWEEK(CAST('2016-01-02T23:39:20.123-07:00' AS TIMESTAMP))",
        )
        self.validate_identity(
            "SELECT * FROM xxx WHERE col ilike '%Don''t%'",
            "SELECT * FROM xxx WHERE col ILIKE '%Don\\'t%'",
        )
        self.validate_identity(
            "SELECT * EXCLUDE a, b FROM xxx",
            "SELECT * EXCLUDE (a), b FROM xxx",
        )
        self.validate_identity(
            "SELECT * RENAME a AS b, c AS d FROM xxx",
            "SELECT * RENAME (a AS b), c AS d FROM xxx",
        )

        self.validate_all(
            "OBJECT_CONSTRUCT_KEEP_NULL('key_1', 'one', 'key_2', NULL)",

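Two-argument validate_identity calls like those above assert a normalization rather than a pure round-trip: the first string is the input, the second the expected Snowflake output. A sketch of one such rewrite (expected text taken from the test):

import sqlglot

# NUMBER defaults to DECIMAL(38, 0) and IDENTITY is expanded to AUTOINCREMENT.
out = sqlglot.transpile(
    "CREATE TABLE x (y NUMBER IDENTITY START 0 INCREMENT 1)",
    read="snowflake",
    write="snowflake",
)[0]
assert out == "CREATE TABLE x (y DECIMAL(38, 0) AUTOINCREMENT START 0 INCREMENT 1)"
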
@@ -249,18 +278,6 @@ WHERE
                "snowflake": "OBJECT_CONSTRUCT_KEEP_NULL('key_1', 'one', 'key_2', NULL)",
            },
        )
        self.validate_all(
            "SELECT * FROM example TABLESAMPLE (3) SEED (82)",
            read={
                "databricks": "SELECT * FROM example TABLESAMPLE (3 PERCENT) REPEATABLE (82)",
                "duckdb": "SELECT * FROM example TABLESAMPLE (3 PERCENT) REPEATABLE (82)",
            },
            write={
                "databricks": "SELECT * FROM example TABLESAMPLE (3 PERCENT) REPEATABLE (82)",
                "duckdb": "SELECT * FROM example TABLESAMPLE (3 PERCENT) REPEATABLE (82)",
                "snowflake": "SELECT * FROM example TABLESAMPLE (3) SEED (82)",
            },
        )
        self.validate_all(
            "SELECT TIME_FROM_PARTS(12, 34, 56, 987654321)",
            write={

@@ -568,60 +585,12 @@ WHERE
            },
        )
        self.validate_all(
            "CREATE OR REPLACE TEMPORARY TABLE x (y NUMBER IDENTITY(0, 1))",
            write={
                "snowflake": "CREATE OR REPLACE TEMPORARY TABLE x (y DECIMAL AUTOINCREMENT START 0 INCREMENT 1)",
            },
        )
        self.validate_all(
            "CREATE TEMPORARY TABLE x (y NUMBER AUTOINCREMENT(0, 1))",
            write={
                "snowflake": "CREATE TEMPORARY TABLE x (y DECIMAL AUTOINCREMENT START 0 INCREMENT 1)",
            },
        )
        self.validate_all(
            "CREATE TABLE x (y NUMBER IDENTITY START 0 INCREMENT 1)",
            write={
                "snowflake": "CREATE TABLE x (y DECIMAL AUTOINCREMENT START 0 INCREMENT 1)",
            },
        )
        self.validate_all(
            "ALTER TABLE foo ADD COLUMN id INT identity(1, 1)",
            write={
                "snowflake": "ALTER TABLE foo ADD COLUMN id INT AUTOINCREMENT START 1 INCREMENT 1",
            },
        )
        self.validate_all(
            "SELECT DAYOFWEEK('2016-01-02T23:39:20.123-07:00'::TIMESTAMP)",
            write={
                "snowflake": "SELECT DAYOFWEEK(CAST('2016-01-02T23:39:20.123-07:00' AS TIMESTAMP))",
            },
        )
        self.validate_all(
            "SELECT * FROM xxx WHERE col ilike '%Don''t%'",
            write={
                "snowflake": "SELECT * FROM xxx WHERE col ILIKE '%Don\\'t%'",
            },
        )
        self.validate_all(
            "SELECT * EXCLUDE a, b FROM xxx",
            write={
                "snowflake": "SELECT * EXCLUDE (a), b FROM xxx",
            },
        )
        self.validate_all(
            "SELECT * RENAME a AS b, c AS d FROM xxx",
            write={
                "snowflake": "SELECT * RENAME (a AS b), c AS d FROM xxx",
            },
        )
        self.validate_all(
            "SELECT * EXCLUDE (a, b) RENAME (c AS d, E AS F) FROM xxx",
            "SELECT * EXCLUDE (a, b) REPLACE (c AS d, E AS F) FROM xxx",
            read={
                "duckdb": "SELECT * EXCLUDE (a, b) REPLACE (c AS d, E AS F) FROM xxx",
            },
            write={
                "snowflake": "SELECT * EXCLUDE (a, b) RENAME (c AS d, E AS F) FROM xxx",
                "snowflake": "SELECT * EXCLUDE (a, b) REPLACE (c AS d, E AS F) FROM xxx",
                "duckdb": "SELECT * EXCLUDE (a, b) REPLACE (c AS d, E AS F) FROM xxx",
            },
        )

@@ -831,6 +800,18 @@ WHERE
                "snowflake": "SELECT LISTAGG(col1, ', ') WITHIN GROUP (ORDER BY col2) FROM t",
            },
        )
        self.validate_all(
            "SELECT APPROX_PERCENTILE(a, 0.5) FROM t",
            read={
                "trino": "SELECT APPROX_PERCENTILE(a, 1, 0.5, 0.001) FROM t",
                "presto": "SELECT APPROX_PERCENTILE(a, 1, 0.5, 0.001) FROM t",
            },
            write={
                "trino": "SELECT APPROX_PERCENTILE(a, 0.5) FROM t",
                "presto": "SELECT APPROX_PERCENTILE(a, 0.5) FROM t",
                "snowflake": "SELECT APPROX_PERCENTILE(a, 0.5) FROM t",
            },
        )

    def test_null_treatment(self):
        self.validate_all(

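The read=/write= pair above pins down both directions of the mapping: Trino and Presto's four-argument APPROX_PERCENTILE collapses to Snowflake's two-argument form. The same expectation, sketched directly:

import sqlglot

out = sqlglot.transpile(
    "SELECT APPROX_PERCENTILE(a, 1, 0.5, 0.001) FROM t",
    read="trino",
    write="snowflake",
)[0]
assert out == "SELECT APPROX_PERCENTILE(a, 0.5) FROM t"
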
@@ -881,6 +862,10 @@ WHERE
        self.validate_identity("SELECT * FROM @namespace.%table_name/path/to/file.json.gz")
        self.validate_identity("SELECT * FROM '@external/location' (FILE_FORMAT => 'path.to.csv')")
        self.validate_identity("PUT file:///dir/tmp.csv @%table", check_command_warning=True)
        self.validate_identity("SELECT * FROM (SELECT a FROM @foo)")
        self.validate_identity(
            "SELECT * FROM (SELECT * FROM '@external/location' (FILE_FORMAT => 'path.to.csv'))"
        )
        self.validate_identity(
            "SELECT * FROM @foo/bar (FILE_FORMAT => ds_sandbox.test.my_csv_format, PATTERN => 'test') AS bla"
        )

@@ -902,18 +887,27 @@ WHERE

    def test_sample(self):
        self.validate_identity("SELECT * FROM testtable TABLESAMPLE BERNOULLI (20.3)")
        self.validate_identity("SELECT * FROM testtable TABLESAMPLE (100)")
        self.validate_identity("SELECT * FROM testtable TABLESAMPLE SYSTEM (3) SEED (82)")
        self.validate_identity("SELECT * FROM testtable TABLESAMPLE (10 ROWS)")
        self.validate_identity(
            "SELECT i, j FROM table1 AS t1 INNER JOIN table2 AS t2 TABLESAMPLE (50) WHERE t2.j = t1.i"
            "SELECT a FROM test PIVOT(SUM(x) FOR y IN ('z', 'q')) AS x TABLESAMPLE BERNOULLI (0.1)"
        )
        self.validate_identity(
            "SELECT * FROM (SELECT * FROM t1 JOIN t2 ON t1.a = t2.c) TABLESAMPLE (1)"
            "SELECT i, j FROM table1 AS t1 INNER JOIN table2 AS t2 TABLESAMPLE BERNOULLI (50) WHERE t2.j = t1.i"
        )
        self.validate_identity(
            "SELECT * FROM (SELECT * FROM t1 JOIN t2 ON t1.a = t2.c) TABLESAMPLE BERNOULLI (1)"
        )
        self.validate_identity(
            "SELECT * FROM testtable TABLESAMPLE (10 ROWS)",
            "SELECT * FROM testtable TABLESAMPLE BERNOULLI (10 ROWS)",
        )
        self.validate_identity(
            "SELECT * FROM testtable TABLESAMPLE (100)",
            "SELECT * FROM testtable TABLESAMPLE BERNOULLI (100)",
        )
        self.validate_identity(
            "SELECT * FROM testtable SAMPLE (10)",
            "SELECT * FROM testtable TABLESAMPLE (10)",
            "SELECT * FROM testtable TABLESAMPLE BERNOULLI (10)",
        )
        self.validate_identity(
            "SELECT * FROM testtable SAMPLE ROW (0)",

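A recurring theme in the updated expectations is that Snowflake generation now names the sampling method explicitly, with BERNOULLI as the default. A minimal sketch of the SAMPLE normalization asserted above:

import sqlglot

out = sqlglot.transpile(
    "SELECT * FROM testtable SAMPLE (10)", read="snowflake", write="snowflake"
)[0]
assert out == "SELECT * FROM testtable TABLESAMPLE BERNOULLI (10)"
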
@@ -923,7 +917,29 @@ WHERE
            "SELECT a FROM test SAMPLE BLOCK (0.5) SEED (42)",
            "SELECT a FROM test TABLESAMPLE BLOCK (0.5) SEED (42)",
        )
        self.validate_identity(
            "SELECT user_id, value FROM table_name SAMPLE BERNOULLI ($s) SEED (0)",
            "SELECT user_id, value FROM table_name TABLESAMPLE BERNOULLI ($s) SEED (0)",
        )

        self.validate_all(
            "SELECT * FROM example TABLESAMPLE BERNOULLI (3) SEED (82)",
            read={
                "duckdb": "SELECT * FROM example TABLESAMPLE BERNOULLI (3 PERCENT) REPEATABLE (82)",
            },
            write={
                "databricks": "SELECT * FROM example TABLESAMPLE (3 PERCENT) REPEATABLE (82)",
                "duckdb": "SELECT * FROM example TABLESAMPLE BERNOULLI (3 PERCENT) REPEATABLE (82)",
                "snowflake": "SELECT * FROM example TABLESAMPLE BERNOULLI (3) SEED (82)",
            },
        )
        self.validate_all(
            "SELECT * FROM test AS _tmp TABLESAMPLE (5)",
            write={
                "postgres": "SELECT * FROM test AS _tmp TABLESAMPLE BERNOULLI (5)",
                "snowflake": "SELECT * FROM test AS _tmp TABLESAMPLE BERNOULLI (5)",
            },
        )
        self.validate_all(
            """
SELECT i, j

@@ -933,7 +949,7 @@ WHERE
table2 AS t2 SAMPLE (50) -- 50% of rows in table2
WHERE t2.j = t1.i""",
            write={
                "snowflake": "SELECT i, j FROM table1 AS t1 TABLESAMPLE (25) /* 25% of rows in table1 */ INNER JOIN table2 AS t2 TABLESAMPLE (50) /* 50% of rows in table2 */ WHERE t2.j = t1.i",
                "snowflake": "SELECT i, j FROM table1 AS t1 TABLESAMPLE BERNOULLI (25) /* 25% of rows in table1 */ INNER JOIN table2 AS t2 TABLESAMPLE BERNOULLI (50) /* 50% of rows in table2 */ WHERE t2.j = t1.i",
            },
        )
        self.validate_all(

@@ -945,7 +961,7 @@ WHERE
        self.validate_all(
            "SELECT * FROM (SELECT * FROM t1 join t2 on t1.a = t2.c) SAMPLE (1)",
            write={
                "snowflake": "SELECT * FROM (SELECT * FROM t1 JOIN t2 ON t1.a = t2.c) TABLESAMPLE (1)",
                "snowflake": "SELECT * FROM (SELECT * FROM t1 JOIN t2 ON t1.a = t2.c) TABLESAMPLE BERNOULLI (1)",
                "spark": "SELECT * FROM (SELECT * FROM t1 JOIN t2 ON t1.a = t2.c) TABLESAMPLE (1 PERCENT)",
            },
        )

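Across dialects, Snowflake's SEED clause lines up with REPEATABLE, and bare percentages gain an explicit PERCENT keyword. A sketch of the DuckDB mapping asserted above:

import sqlglot

out = sqlglot.transpile(
    "SELECT * FROM example TABLESAMPLE BERNOULLI (3) SEED (82)",
    read="snowflake",
    write="duckdb",
)[0]
assert out == "SELECT * FROM example TABLESAMPLE BERNOULLI (3 PERCENT) REPEATABLE (82)"
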
@@ -1172,7 +1188,7 @@ WHERE
location=@s2/logs/
partition_type = user_specified
file_format = (type = parquet)""",
            "CREATE EXTERNAL TABLE et2 (col1 DATE AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL1') AS DATE)), col2 VARCHAR AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL2') AS VARCHAR)), col3 DECIMAL AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL3') AS DECIMAL))) LOCATION @s2/logs/ PARTITION BY (col1, col2, col3) partition_type=user_specified file_format=(type = parquet)",
            "CREATE EXTERNAL TABLE et2 (col1 DATE AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL1') AS DATE)), col2 VARCHAR AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL2') AS VARCHAR)), col3 DECIMAL(38, 0) AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL3') AS DECIMAL(38, 0)))) LOCATION @s2/logs/ PARTITION BY (col1, col2, col3) partition_type=user_specified file_format=(type = parquet)",
        )
        self.validate_identity("CREATE OR REPLACE VIEW foo (uid) COPY GRANTS AS (SELECT 1)")
        self.validate_identity("CREATE TABLE geospatial_table (id INT, g GEOGRAPHY)")

@@ -1181,8 +1197,12 @@ WHERE
        self.validate_identity("CREATE SCHEMA mytestschema_clone CLONE testschema")
        self.validate_identity("CREATE TABLE IDENTIFIER('foo') (COLUMN1 VARCHAR, COLUMN2 VARCHAR)")
        self.validate_identity("CREATE TABLE IDENTIFIER($foo) (col1 VARCHAR, col2 VARCHAR)")
        self.validate_identity("CREATE TAG cost_center ALLOWED_VALUES 'a', 'b'")
        self.validate_identity(
            "DROP function my_udf (OBJECT(city VARCHAR, zipcode DECIMAL, val ARRAY(BOOLEAN)))"
            "CREATE OR REPLACE TAG IF NOT EXISTS cost_center COMMENT='cost_center tag'"
        ).this.assert_is(exp.Identifier)
        self.validate_identity(
            "DROP FUNCTION my_udf (OBJECT(city VARCHAR, zipcode DECIMAL(38, 0), val ARRAY(BOOLEAN)))"
        )
        self.validate_identity(
            "CREATE TABLE orders_clone_restore CLONE orders AT (TIMESTAMP => TO_TIMESTAMP_TZ('04/05/2013 01:02:03', 'mm/dd/yyyy hh24:mi:ss'))"

@@ -1203,13 +1223,27 @@ WHERE
            "CREATE ICEBERG TABLE my_iceberg_table (amount ARRAY(INT)) CATALOG='SNOWFLAKE' EXTERNAL_VOLUME='my_external_volume' BASE_LOCATION='my/relative/path/from/extvol'"
        )
        self.validate_identity(
            "CREATE OR REPLACE FUNCTION my_udf(location OBJECT(city VARCHAR, zipcode DECIMAL, val ARRAY(BOOLEAN))) RETURNS VARCHAR AS $$ SELECT 'foo' $$",
            "CREATE OR REPLACE FUNCTION my_udf(location OBJECT(city VARCHAR, zipcode DECIMAL, val ARRAY(BOOLEAN))) RETURNS VARCHAR AS ' SELECT \\'foo\\' '",
            """CREATE OR REPLACE FUNCTION ibis_udfs.public.object_values("obj" OBJECT) RETURNS ARRAY LANGUAGE JAVASCRIPT RETURNS NULL ON NULL INPUT AS ' return Object.values(obj) '"""
        )
        self.validate_identity(
            """CREATE OR REPLACE FUNCTION ibis_udfs.public.object_values("obj" OBJECT) RETURNS ARRAY LANGUAGE JAVASCRIPT STRICT AS ' return Object.values(obj) '"""
        )
        self.validate_identity(
            "CREATE OR REPLACE FUNCTION my_udf(location OBJECT(city VARCHAR, zipcode DECIMAL(38, 0), val ARRAY(BOOLEAN))) RETURNS VARCHAR AS $$ SELECT 'foo' $$",
            "CREATE OR REPLACE FUNCTION my_udf(location OBJECT(city VARCHAR, zipcode DECIMAL(38, 0), val ARRAY(BOOLEAN))) RETURNS VARCHAR AS ' SELECT \\'foo\\' '",
        )
        self.validate_identity(
            "CREATE OR REPLACE FUNCTION my_udtf(foo BOOLEAN) RETURNS TABLE(col1 ARRAY(INT)) AS $$ WITH t AS (SELECT CAST([1, 2, 3] AS ARRAY(INT)) AS c) SELECT c FROM t $$",
            "CREATE OR REPLACE FUNCTION my_udtf(foo BOOLEAN) RETURNS TABLE (col1 ARRAY(INT)) AS ' WITH t AS (SELECT CAST([1, 2, 3] AS ARRAY(INT)) AS c) SELECT c FROM t '",
        )
        self.validate_identity(
            "CREATE SEQUENCE seq1 WITH START=1, INCREMENT=1 ORDER",
            "CREATE SEQUENCE seq1 START=1 INCREMENT BY 1 ORDER",
        )
        self.validate_identity(
            "CREATE SEQUENCE seq1 WITH START=1 INCREMENT=1 ORDER",
            "CREATE SEQUENCE seq1 START=1 INCREMENT=1 ORDER",
        )

        self.validate_all(
            "CREATE TABLE orders_clone CLONE orders",

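The UDF expectations above show $$-quoted bodies being normalized to single-quoted strings with escaped inner quotes. A sketch of the same rewrite (input and expected output copied from the test):

import sqlglot

out = sqlglot.transpile(
    "CREATE OR REPLACE FUNCTION my_udf(location OBJECT(city VARCHAR, zipcode DECIMAL(38, 0), val ARRAY(BOOLEAN))) RETURNS VARCHAR AS $$ SELECT 'foo' $$",
    read="snowflake",
    write="snowflake",
)[0]
assert out == "CREATE OR REPLACE FUNCTION my_udf(location OBJECT(city VARCHAR, zipcode DECIMAL(38, 0), val ARRAY(BOOLEAN))) RETURNS VARCHAR AS ' SELECT \\'foo\\' '"
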
@@ -1435,6 +1469,9 @@ FROM persons AS p, LATERAL FLATTEN(input => p.c, path => 'contact') AS _flattened
        )

    def test_values(self):
        select = exp.select("*").from_("values (map(['a'], [1]))")
        self.assertEqual(select.sql("snowflake"), "SELECT * FROM (SELECT OBJECT_CONSTRUCT('a', 1))")

        self.validate_all(
            'SELECT "c0", "c1" FROM (VALUES (1, 2), (3, 4)) AS "t0"("c0", "c1")',
            read={

@@ -1832,10 +1869,7 @@ STORAGE_ALLOWED_LOCATIONS=('s3://mybucket1/path1/', 's3://mybucket2/path2/')""",
        self.assertEqual(expression.sql(dialect="snowflake"), "SELECT TRY_CAST(FOO() AS TEXT)")

    def test_copy(self):
        self.validate_identity(
            """COPY INTO mytable (col1, col2) FROM 's3://mybucket/data/files' FILES = ('file1', 'file2') PATTERN = 'pattern' file_format = (FORMAT_NAME = my_csv_format NULL_IF = ('str1', 'str2')) PARSE_HEADER = TRUE""",
            """COPY INTO mytable (col1, col2) FROM 's3://mybucket/data/files' FILES = ('file1', 'file2') PATTERN = 'pattern' FILE_FORMAT = (FORMAT_NAME = my_csv_format NULL_IF = ('str1', 'str2')) PARSE_HEADER = TRUE""",
        )
        self.validate_identity("COPY INTO test (c1) FROM (SELECT $1.c1 FROM @mystage)")
        self.validate_identity(
            """COPY INTO temp FROM @random_stage/path/ FILE_FORMAT = (TYPE = CSV FIELD_DELIMITER = '|' NULL_IF = () FIELD_OPTIONALLY_ENCLOSED_BY = '"' TIMESTAMP_FORMAT = 'TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' DATE_FORMAT = 'TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' BINARY_FORMAT = BASE64) VALIDATION_MODE = 'RETURN_3_ROWS'"""
        )

@@ -1845,3 +1879,81 @@ STORAGE_ALLOWED_LOCATIONS=('s3://mybucket1/path1/', 's3://mybucket2/path2/')""",
        self.validate_identity(
            """COPY INTO mytable FROM 'azure://myaccount.blob.core.windows.net/mycontainer/data/files' CREDENTIALS = (AZURE_SAS_TOKEN = 'token') ENCRYPTION = (TYPE = 'AZURE_CSE' MASTER_KEY = 'kPx...') FILE_FORMAT = (FORMAT_NAME = my_csv_format)"""
        )
        self.validate_identity(
            """COPY INTO mytable (col1, col2) FROM 's3://mybucket/data/files' FILES = ('file1', 'file2') PATTERN = 'pattern' FILE_FORMAT = (FORMAT_NAME = my_csv_format NULL_IF = ('str1', 'str2')) PARSE_HEADER = TRUE"""
        )
        self.validate_all(
            """COPY INTO 's3://example/data.csv'
FROM EXTRA.EXAMPLE.TABLE
credentials = (x)
STORAGE_INTEGRATION = S3_INTEGRATION
FILE_FORMAT = (TYPE = CSV COMPRESSION = NONE NULL_IF = ('') FIELD_OPTIONALLY_ENCLOSED_BY = '"')
HEADER = TRUE
OVERWRITE = TRUE
SINGLE = TRUE
""",
            write={
                "": """COPY INTO 's3://example/data.csv'
FROM EXTRA.EXAMPLE.TABLE
CREDENTIALS = (x) WITH (
STORAGE_INTEGRATION S3_INTEGRATION,
FILE_FORMAT = (TYPE = CSV COMPRESSION = NONE NULL_IF = (
''
) FIELD_OPTIONALLY_ENCLOSED_BY = '"'),
HEADER TRUE,
OVERWRITE TRUE,
SINGLE TRUE
)""",
                "snowflake": """COPY INTO 's3://example/data.csv'
FROM EXTRA.EXAMPLE.TABLE
CREDENTIALS = (x)
STORAGE_INTEGRATION = S3_INTEGRATION
FILE_FORMAT = (TYPE = CSV COMPRESSION = NONE NULL_IF = (
''
) FIELD_OPTIONALLY_ENCLOSED_BY = '"')
HEADER = TRUE
OVERWRITE = TRUE
SINGLE = TRUE""",
            },
            pretty=True,
        )
        self.validate_all(
            """COPY INTO 's3://example/data.csv'
FROM EXTRA.EXAMPLE.TABLE
credentials = (x)
STORAGE_INTEGRATION = S3_INTEGRATION
FILE_FORMAT = (TYPE = CSV COMPRESSION = NONE NULL_IF = ('') FIELD_OPTIONALLY_ENCLOSED_BY = '"')
HEADER = TRUE
OVERWRITE = TRUE
SINGLE = TRUE
""",
            write={
                "": """COPY INTO 's3://example/data.csv' FROM EXTRA.EXAMPLE.TABLE CREDENTIALS = (x) WITH (STORAGE_INTEGRATION S3_INTEGRATION, FILE_FORMAT = (TYPE = CSV COMPRESSION = NONE NULL_IF = ('') FIELD_OPTIONALLY_ENCLOSED_BY = '"'), HEADER TRUE, OVERWRITE TRUE, SINGLE TRUE)""",
                "snowflake": """COPY INTO 's3://example/data.csv' FROM EXTRA.EXAMPLE.TABLE CREDENTIALS = (x) STORAGE_INTEGRATION = S3_INTEGRATION FILE_FORMAT = (TYPE = CSV COMPRESSION = NONE NULL_IF = ('') FIELD_OPTIONALLY_ENCLOSED_BY = '"') HEADER = TRUE OVERWRITE = TRUE SINGLE = TRUE""",
            },
        )

    def test_querying_semi_structured_data(self):
        self.validate_identity("SELECT $1")
        self.validate_identity("SELECT $1.elem")

        self.validate_identity("SELECT $1:a.b", "SELECT GET_PATH($1, 'a.b')")
        self.validate_identity("SELECT t.$23:a.b", "SELECT GET_PATH(t.$23, 'a.b')")
        self.validate_identity("SELECT t.$17:a[0].b[0].c", "SELECT GET_PATH(t.$17, 'a[0].b[0].c')")

    def test_alter_set_unset(self):
        self.validate_identity("ALTER TABLE tbl SET DATA_RETENTION_TIME_IN_DAYS=1")
        self.validate_identity("ALTER TABLE tbl SET DEFAULT_DDL_COLLATION='test'")
        self.validate_identity("ALTER TABLE foo SET COMMENT='bar'")
        self.validate_identity("ALTER TABLE foo SET CHANGE_TRACKING=FALSE")
        self.validate_identity("ALTER TABLE table1 SET TAG foo.bar = 'baz'")
        self.validate_identity("ALTER TABLE IF EXISTS foo SET TAG a = 'a', b = 'b', c = 'c'")
        self.validate_identity(
            """ALTER TABLE tbl SET STAGE_FILE_FORMAT = (TYPE = CSV FIELD_DELIMITER = '|' NULL_IF = () FIELD_OPTIONALLY_ENCLOSED_BY = '"' TIMESTAMP_FORMAT = 'TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' DATE_FORMAT = 'TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' BINARY_FORMAT = BASE64)""",
        )
        self.validate_identity(
            """ALTER TABLE tbl SET STAGE_COPY_OPTIONS = (ON_ERROR = SKIP_FILE SIZE_LIMIT = 5 PURGE = TRUE MATCH_BY_COLUMN_NAME = CASE_SENSITIVE)"""
        )

        self.validate_identity("ALTER TABLE foo UNSET TAG a, b, c")
        self.validate_identity("ALTER TABLE foo UNSET DATA_RETENTION_TIME_IN_DAYS, CHANGE_TRACKING")

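The colon-path tests assert that Snowflake's variant traversal syntax is canonicalized to GET_PATH. A final sketch, using the deepest path from the tests above:

import sqlglot

out = sqlglot.transpile("SELECT t.$17:a[0].b[0].c", read="snowflake", write="snowflake")[0]
assert out == "SELECT GET_PATH(t.$17, 'a[0].b[0].c')"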