Adding upstream version 21.1.2.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent 92ffd7746f
commit b01402dc30

103 changed files with 18237 additions and 17794 deletions
@@ -594,9 +594,9 @@ WHERE
         self.validate_all(
             "SELECT TO_TIMESTAMP(16599817290000, 4)",
             write={
-                "bigquery": "SELECT TIMESTAMP_SECONDS(CAST(16599817290000 / POW(10, 4) AS INT64))",
+                "bigquery": "SELECT TIMESTAMP_SECONDS(CAST(16599817290000 / POWER(10, 4) AS INT64))",
                 "snowflake": "SELECT TO_TIMESTAMP(16599817290000, 4)",
-                "spark": "SELECT TIMESTAMP_SECONDS(16599817290000 / POW(10, 4))",
+                "spark": "SELECT TIMESTAMP_SECONDS(16599817290000 / POWER(10, 4))",
             },
         )
         self.validate_all(
@@ -609,11 +609,11 @@ WHERE
         self.validate_all(
             "SELECT TO_TIMESTAMP(1659981729000000000, 9)",
             write={
-                "bigquery": "SELECT TIMESTAMP_SECONDS(CAST(1659981729000000000 / POW(10, 9) AS INT64))",
-                "duckdb": "SELECT TO_TIMESTAMP(1659981729000000000 / POW(10, 9))",
+                "bigquery": "SELECT TIMESTAMP_SECONDS(CAST(1659981729000000000 / POWER(10, 9) AS INT64))",
+                "duckdb": "SELECT TO_TIMESTAMP(1659981729000000000 / POWER(10, 9))",
                 "presto": "SELECT FROM_UNIXTIME(CAST(1659981729000000000 AS DOUBLE) / POW(10, 9))",
                 "snowflake": "SELECT TO_TIMESTAMP(1659981729000000000, 9)",
-                "spark": "SELECT TIMESTAMP_SECONDS(1659981729000000000 / POW(10, 9))",
+                "spark": "SELECT TIMESTAMP_SECONDS(1659981729000000000 / POWER(10, 9))",
             },
         )
         self.validate_all(
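Note: the two hunks above update the expected BigQuery, DuckDB, and Spark output for Snowflake's TO_TIMESTAMP(<int>, <scale>) so the scale divisor is rendered as POWER(10, n) instead of POW(10, n). A minimal sketch of the same transpilation, assuming these tests belong to the sqlglot dialect test suite and using its public transpile() helper (everything outside the expected strings is illustrative, not part of the commit):

    import sqlglot

    # Snowflake integer timestamp with an explicit scale of 4
    # (the value is divided by 10**4 to obtain seconds).
    sql = "SELECT TO_TIMESTAMP(16599817290000, 4)"

    # After this change the BigQuery output spells the divisor POWER(10, 4).
    print(sqlglot.transpile(sql, read="snowflake", write="bigquery")[0])
    # Expected per the updated test:
    # SELECT TIMESTAMP_SECONDS(CAST(16599817290000 / POWER(10, 4) AS INT64))

    print(sqlglot.transpile(sql, read="snowflake", write="spark")[0])
    # Expected: SELECT TIMESTAMP_SECONDS(16599817290000 / POWER(10, 4))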
@@ -1548,6 +1548,17 @@ MATCH_RECOGNIZE (
         self.assertTrue(isinstance(users_exp, exp.Show))
         self.assertEqual(users_exp.this, "USERS")
 
+    def test_storage_integration(self):
+        self.validate_identity(
+            """CREATE STORAGE INTEGRATION s3_int
+TYPE=EXTERNAL_STAGE
+STORAGE_PROVIDER='S3'
+STORAGE_AWS_ROLE_ARN='arn:aws:iam::001234567890:role/myrole'
+ENABLED=TRUE
+STORAGE_ALLOWED_LOCATIONS=('s3://mybucket1/path1/', 's3://mybucket2/path2/')""",
+            pretty=True,
+        )
+
     def test_swap(self):
         ast = parse_one("ALTER TABLE a SWAP WITH b", read="snowflake")
         assert isinstance(ast, exp.AlterTable)
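Note: the new test_storage_integration case asserts that Snowflake's CREATE STORAGE INTEGRATION DDL round-trips through the parser when pretty-printed, and the surrounding test_swap context shows ALTER TABLE ... SWAP WITH parsing to an exp.AlterTable node. A hedged sketch of the same round-trip outside the test harness, again assuming the sqlglot API already used in the diff (parse_one, exp, Expression.sql):

    from sqlglot import exp, parse_one

    ddl = """CREATE STORAGE INTEGRATION s3_int
    TYPE=EXTERNAL_STAGE
    STORAGE_PROVIDER='S3'
    STORAGE_AWS_ROLE_ARN='arn:aws:iam::001234567890:role/myrole'
    ENABLED=TRUE
    STORAGE_ALLOWED_LOCATIONS=('s3://mybucket1/path1/', 's3://mybucket2/path2/')"""

    # Parse with the Snowflake dialect and regenerate with pretty=True;
    # validate_identity in the test asserts the generated SQL matches the input.
    ast = parse_one(ddl, read="snowflake")
    print(ast.sql(dialect="snowflake", pretty=True))

    # The swap statement parses to an AlterTable expression, as asserted in test_swap.
    swap = parse_one("ALTER TABLE a SWAP WITH b", read="snowflake")
    assert isinstance(swap, exp.AlterTable)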