Adding upstream version 23.13.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>

parent 5d33af745d
commit e47608846f

85 changed files with 53899 additions and 50390 deletions
@@ -10,6 +10,9 @@ class TestSnowflake(Validator):
     dialect = "snowflake"
 
     def test_snowflake(self):
+        self.validate_identity(
+            "MERGE INTO my_db AS ids USING (SELECT new_id FROM my_model WHERE NOT col IS NULL) AS new_ids ON ids.type = new_ids.type AND ids.source = new_ids.source WHEN NOT MATCHED THEN INSERT VALUES (new_ids.new_id)"
+        )
         self.validate_identity("ALTER TABLE table1 CLUSTER BY (name DESC)")
         self.validate_identity(
             "INSERT OVERWRITE TABLE t SELECT 1", "INSERT OVERWRITE INTO t SELECT 1"
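
The added MERGE test is an identity check: the statement is parsed as Snowflake and regenerated as Snowflake, and the output must equal the input. A minimal sketch of that round trip, assuming only sqlglot's public parse_one/sql entry points (the sample statement is taken from the hunk above):

    import sqlglot

    # Round-trip a Snowflake statement; validate_identity asserts output == input.
    sql = "ALTER TABLE table1 CLUSTER BY (name DESC)"
    roundtripped = sqlglot.parse_one(sql, read="snowflake").sql(dialect="snowflake")
    assert roundtripped == sql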
@@ -388,7 +391,7 @@ WHERE
             "SELECT DATE_PART('year', TIMESTAMP '2020-01-01')",
             write={
                 "hive": "SELECT EXTRACT(year FROM CAST('2020-01-01' AS TIMESTAMP))",
-                "snowflake": "SELECT DATE_PART('year', CAST('2020-01-01' AS TIMESTAMPNTZ))",
+                "snowflake": "SELECT DATE_PART('year', CAST('2020-01-01' AS TIMESTAMP))",
                 "spark": "SELECT EXTRACT(year FROM CAST('2020-01-01' AS TIMESTAMP))",
             },
         )
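
This hunk, like several below, updates the expected Snowflake output from TIMESTAMPNTZ to plain TIMESTAMP. A hedged sketch of the behaviour the new expectation encodes, assuming the public sqlglot.transpile entry point (the expected string mirrors the updated write entry):

    import sqlglot

    # A Snowflake TIMESTAMP literal cast is now expected to stay TIMESTAMP
    # when regenerated for Snowflake (previously rewritten to TIMESTAMPNTZ).
    out = sqlglot.transpile(
        "SELECT DATE_PART('year', TIMESTAMP '2020-01-01')",
        read="snowflake",
        write="snowflake",
    )[0]
    print(out)  # expected: SELECT DATE_PART('year', CAST('2020-01-01' AS TIMESTAMP))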
@@ -591,7 +594,7 @@ WHERE
         self.validate_all(
             "SELECT DAYOFWEEK('2016-01-02T23:39:20.123-07:00'::TIMESTAMP)",
             write={
-                "snowflake": "SELECT DAYOFWEEK(CAST('2016-01-02T23:39:20.123-07:00' AS TIMESTAMPNTZ))",
+                "snowflake": "SELECT DAYOFWEEK(CAST('2016-01-02T23:39:20.123-07:00' AS TIMESTAMP))",
             },
         )
         self.validate_all(
@@ -689,7 +692,7 @@ WHERE
             "SELECT TO_TIMESTAMP('2013-04-05 01:02:03')",
             write={
                 "bigquery": "SELECT CAST('2013-04-05 01:02:03' AS DATETIME)",
-                "snowflake": "SELECT CAST('2013-04-05 01:02:03' AS TIMESTAMPNTZ)",
+                "snowflake": "SELECT CAST('2013-04-05 01:02:03' AS TIMESTAMP)",
                 "spark": "SELECT CAST('2013-04-05 01:02:03' AS TIMESTAMP)",
             },
         )
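
For cross-dialect cases like this TO_TIMESTAMP one, validate_all transpiles the Snowflake input to each dialect listed in write. A small standalone illustration of the same expectations, assuming sqlglot.transpile (the expected strings are copied from the hunk above):

    import sqlglot

    # Transpile Snowflake TO_TIMESTAMP to two of the listed target dialects.
    sql = "SELECT TO_TIMESTAMP('2013-04-05 01:02:03')"
    for dialect, expected in {
        "bigquery": "SELECT CAST('2013-04-05 01:02:03' AS DATETIME)",
        "spark": "SELECT CAST('2013-04-05 01:02:03' AS TIMESTAMP)",
    }.items():
        assert sqlglot.transpile(sql, read="snowflake", write=dialect)[0] == expected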
@@ -878,10 +881,6 @@ WHERE
         self.validate_identity("SELECT * FROM @namespace.%table_name/path/to/file.json.gz")
         self.validate_identity("SELECT * FROM '@external/location' (FILE_FORMAT => 'path.to.csv')")
         self.validate_identity("PUT file:///dir/tmp.csv @%table", check_command_warning=True)
-        self.validate_identity(
-            'COPY INTO NEW_TABLE ("foo", "bar") FROM (SELECT $1, $2, $3, $4 FROM @%old_table)',
-            check_command_warning=True,
-        )
         self.validate_identity(
             "SELECT * FROM @foo/bar (FILE_FORMAT => ds_sandbox.test.my_csv_format, PATTERN => 'test') AS bla"
         )
@@ -955,12 +954,16 @@ WHERE
         self.validate_identity("SELECT CAST('12:00:00' AS TIME)")
         self.validate_identity("SELECT DATE_PART(month, a)")
 
-        self.validate_all(
-            "SELECT CAST(a AS TIMESTAMP)",
-            write={
-                "snowflake": "SELECT CAST(a AS TIMESTAMPNTZ)",
-            },
-        )
+        for data_type in (
+            "TIMESTAMP",
+            "TIMESTAMPLTZ",
+            "TIMESTAMPNTZ",
+        ):
+            self.validate_identity(f"CAST(a AS {data_type})")
+
+        self.validate_identity("CAST(a AS TIMESTAMP_NTZ)", "CAST(a AS TIMESTAMPNTZ)")
+        self.validate_identity("CAST(a AS TIMESTAMP_LTZ)", "CAST(a AS TIMESTAMPLTZ)")
+
         self.validate_all(
             "SELECT a::TIMESTAMP_LTZ(9)",
             write={
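
The replacement block checks two things: the three timestamp type names round-trip unchanged, and the underscored spellings TIMESTAMP_NTZ / TIMESTAMP_LTZ are normalised to TIMESTAMPNTZ / TIMESTAMPLTZ. Roughly equivalent standalone checks, sketched with sqlglot.transpile and the expectations taken from the hunk above:

    import sqlglot

    # Underscored Snowflake spellings normalise to the canonical type names.
    for source, expected in [
        ("CAST(a AS TIMESTAMP_NTZ)", "CAST(a AS TIMESTAMPNTZ)"),
        ("CAST(a AS TIMESTAMP_LTZ)", "CAST(a AS TIMESTAMPLTZ)"),
    ]:
        assert sqlglot.transpile(source, read="snowflake", write="snowflake")[0] == expected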
@@ -1000,14 +1003,14 @@ WHERE
         self.validate_all(
             "SELECT DATE_PART(epoch_second, foo) as ddate from table_name",
             write={
-                "snowflake": "SELECT EXTRACT(epoch_second FROM CAST(foo AS TIMESTAMPNTZ)) AS ddate FROM table_name",
+                "snowflake": "SELECT EXTRACT(epoch_second FROM CAST(foo AS TIMESTAMP)) AS ddate FROM table_name",
                 "presto": "SELECT TO_UNIXTIME(CAST(foo AS TIMESTAMP)) AS ddate FROM table_name",
             },
         )
         self.validate_all(
             "SELECT DATE_PART(epoch_milliseconds, foo) as ddate from table_name",
             write={
-                "snowflake": "SELECT EXTRACT(epoch_second FROM CAST(foo AS TIMESTAMPNTZ)) * 1000 AS ddate FROM table_name",
+                "snowflake": "SELECT EXTRACT(epoch_second FROM CAST(foo AS TIMESTAMP)) * 1000 AS ddate FROM table_name",
                 "presto": "SELECT TO_UNIXTIME(CAST(foo AS TIMESTAMP)) * 1000 AS ddate FROM table_name",
             },
         )
@@ -1138,7 +1141,7 @@ WHERE
         )
         self.validate_identity(
             "SELECT * FROM my_table AT (TIMESTAMP => 'Fri, 01 May 2015 16:20:00 -0700'::timestamp)",
-            "SELECT * FROM my_table AT (TIMESTAMP => CAST('Fri, 01 May 2015 16:20:00 -0700' AS TIMESTAMPNTZ))",
+            "SELECT * FROM my_table AT (TIMESTAMP => CAST('Fri, 01 May 2015 16:20:00 -0700' AS TIMESTAMP))",
         )
         self.validate_identity(
             "SELECT * FROM my_table AT(TIMESTAMP => 'Fri, 01 May 2015 16:20:00 -0700'::timestamp_tz)",
@@ -1581,7 +1584,7 @@ FROM persons AS p, LATERAL FLATTEN(input => p.c, path => 'contact') AS _flattene
             "REGEXP_REPLACE(subject, pattern, replacement, position, occurrence, parameters)",
             write={
                 "bigquery": "REGEXP_REPLACE(subject, pattern, replacement)",
-                "duckdb": "REGEXP_REPLACE(subject, pattern, replacement)",
+                "duckdb": "REGEXP_REPLACE(subject, pattern, replacement, parameters)",
                 "hive": "REGEXP_REPLACE(subject, pattern, replacement)",
                 "snowflake": "REGEXP_REPLACE(subject, pattern, replacement, position, occurrence, parameters)",
                 "spark": "REGEXP_REPLACE(subject, pattern, replacement, position)",
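
The DuckDB expectation now keeps the regex parameters (flags) argument instead of dropping it. A sketch of the corresponding transpilation, assuming sqlglot.transpile; the argument names are the placeholder identifiers used by the test itself:

    import sqlglot

    # Snowflake REGEXP_REPLACE with flags transpiled to DuckDB; per the updated
    # expectation, the flags argument is preserved in the DuckDB output.
    out = sqlglot.transpile(
        "REGEXP_REPLACE(subject, pattern, replacement, position, occurrence, parameters)",
        read="snowflake",
        write="duckdb",
    )[0]
    print(out)  # expected: REGEXP_REPLACE(subject, pattern, replacement, parameters)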
@@ -1827,3 +1830,17 @@ STORAGE_ALLOWED_LOCATIONS=('s3://mybucket1/path1/', 's3://mybucket2/path2/')""",
 
         expression = annotate_types(expression)
         self.assertEqual(expression.sql(dialect="snowflake"), "SELECT TRY_CAST(FOO() AS TEXT)")
+
+    def test_copy(self):
+        self.validate_identity(
+            """COPY INTO mytable (col1, col2) FROM 's3://mybucket/data/files' FILES = ('file1', 'file2') PATTERN = 'pattern' FILE_FORMAT = (FORMAT_NAME = my_csv_format NULL_IF = ('str1', 'str2')) PARSE_HEADER = TRUE"""
+        )
+        self.validate_identity(
+            """COPY INTO temp FROM @random_stage/path/ FILE_FORMAT = (TYPE = CSV FIELD_DELIMITER = '|' NULL_IF = () FIELD_OPTIONALLY_ENCLOSED_BY = '"' TIMESTAMP_FORMAT = 'TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' DATE_FORMAT = 'TZHTZM YYYY-MM-DD HH24:MI:SS.FF9' BINARY_FORMAT = BASE64) VALIDATION_MODE = 'RETURN_3_ROWS'"""
+        )
+        self.validate_identity(
+            """COPY INTO load1 FROM @%load1/data1/ FILES = ('test1.csv', 'test2.csv') FORCE = TRUE"""
+        )
+        self.validate_identity(
+            """COPY INTO mytable FROM 'azure://myaccount.blob.core.windows.net/mycontainer/data/files' CREDENTIALS = (AZURE_SAS_TOKEN = 'token') ENCRYPTION = (TYPE = 'AZURE_CSE' MASTER_KEY = 'kPx...') FILE_FORMAT = (FORMAT_NAME = my_csv_format)"""
+        )
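
The new test_copy method exercises real parsing of COPY INTO statements; the earlier check_command_warning variant removed above treated them as opaque commands. A minimal round-trip sketch along the same lines, assuming sqlglot's parse_one/sql API and reusing one statement from the hunk above:

    import sqlglot

    # COPY INTO is now parsed into a proper expression tree and should round-trip.
    sql = "COPY INTO load1 FROM @%load1/data1/ FILES = ('test1.csv', 'test2.csv') FORCE = TRUE"
    assert sqlglot.parse_one(sql, read="snowflake").sql(dialect="snowflake") == sql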