1
0
Fork 0

Adding upstream version 26.9.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-03-09 08:41:47 +01:00
parent 4c394df415
commit 412e82cbc6
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
43 changed files with 27039 additions and 26675 deletions

View file

@ -52,7 +52,6 @@ class TestSnowflake(Validator):
self.validate_identity("SELECT OBJECT_CONSTRUCT()")
self.validate_identity("SELECT DAYOFMONTH(CURRENT_TIMESTAMP())")
self.validate_identity("SELECT DAYOFYEAR(CURRENT_TIMESTAMP())")
self.validate_identity("LISTAGG(data['some_field'], ',')")
self.validate_identity("WEEKOFYEAR(tstamp)")
self.validate_identity("SELECT QUARTER(CURRENT_TIMESTAMP())")
self.validate_identity("SELECT SUM(amount) FROM mytable GROUP BY ALL")
@ -107,6 +106,10 @@ class TestSnowflake(Validator):
self.validate_identity(
"""SELECT TO_TIMESTAMP('2025-01-16T14:45:30.123+0500', 'yyyy-mm-DD"T"hh24:mi:ss.ff3TZHTZM')"""
)
self.validate_identity(
"SELECT 1 put",
"SELECT 1 AS put",
)
self.validate_identity(
"WITH t (SELECT 1 AS c) SELECT c FROM t",
"WITH t AS (SELECT 1 AS c) SELECT c FROM t",
@ -296,6 +299,27 @@ class TestSnowflake(Validator):
"SELECT * RENAME (a AS b), c AS d FROM xxx",
)
# Support for optional trailing commas after tables in from clause
self.validate_identity(
"SELECT * FROM xxx, yyy, zzz,",
"SELECT * FROM xxx, yyy, zzz",
)
self.validate_identity(
"SELECT * FROM xxx, yyy, zzz, WHERE foo = bar",
"SELECT * FROM xxx, yyy, zzz WHERE foo = bar",
)
self.validate_identity(
"SELECT * FROM xxx, yyy, zzz",
"SELECT * FROM xxx, yyy, zzz",
)
self.validate_all(
"CREATE TABLE test_table (id NUMERIC NOT NULL AUTOINCREMENT)",
write={
"duckdb": "CREATE TABLE test_table (id DECIMAL(38, 0) NOT NULL)",
"snowflake": "CREATE TABLE test_table (id DECIMAL(38, 0) NOT NULL AUTOINCREMENT)",
},
)
self.validate_all(
"SELECT TO_TIMESTAMP('2025-01-16 14:45:30.123', 'yyyy-mm-DD hh24:mi:ss.ff6')",
write={
@ -852,13 +876,6 @@ class TestSnowflake(Validator):
"snowflake": "CASE WHEN x = a OR (x IS NULL AND a IS NULL) THEN b WHEN x = c OR (x IS NULL AND c IS NULL) THEN d ELSE e END",
},
)
self.validate_all(
"SELECT LISTAGG(col1, ', ') WITHIN GROUP (ORDER BY col2) FROM t",
write={
"duckdb": "SELECT GROUP_CONCAT(col1, ', ' ORDER BY col2) FROM t",
"snowflake": "SELECT LISTAGG(col1, ', ') WITHIN GROUP (ORDER BY col2) FROM t",
},
)
self.validate_all(
"SELECT APPROX_PERCENTILE(a, 0.5) FROM t",
read={
@ -2369,6 +2386,43 @@ SINGLE = TRUE""",
"""COPY INTO 's3://example/contacts.csv' FROM "db"."tbl" STORAGE_INTEGRATION = "PROD_S3_SIDETRADE_INTEGRATION" FILE_FORMAT = (FORMAT_NAME="my_csv_format" TYPE=CSV COMPRESSION=NONE NULL_IF=('') FIELD_OPTIONALLY_ENCLOSED_BY='"') MATCH_BY_COLUMN_NAME = CASE_SENSITIVE OVERWRITE = TRUE SINGLE = TRUE INCLUDE_METADATA = ("col1" = "METADATA$START_SCAN_TIME")""",
)
def test_put_to_stage(self):
    """Parsing and round-tripping of Snowflake PUT <file_uri> <stage> statements."""
    # Quoted operands may contain spaces; both the file URI and the stage
    # reference should survive parsing intact.
    tree = parse_one("PUT 'file://my file.txt' '@s1/my folder'", read="snowflake")
    self.assertIsInstance(tree, exp.Put)
    self.assertEqual(tree.this, exp.Literal(this="file://my file.txt", is_string=True))
    self.assertEqual(tree.args["target"], exp.Var(this="@s1/my folder"))

    # A PUT statement may carry trailing key=value properties after the stage ref.
    tree = parse_one(
        "PUT 'file:///tmp/my.txt' @stage1/folder PARALLEL = 1 AUTO_COMPRESS=false source_compression=gzip OVERWRITE=TRUE",
        read="snowflake",
    )
    self.assertIsInstance(tree, exp.Put)
    self.assertEqual(tree.this, exp.Literal(this="file:///tmp/my.txt", is_string=True))
    self.assertEqual(tree.args["target"], exp.Var(this="@stage1/folder"))

    parsed_props = tree.args.get("properties")
    observed = {p.this.this: p.args["value"].this for p in parsed_props.expressions}
    expected = {
        "PARALLEL": "1",
        "AUTO_COMPRESS": False,
        "source_compression": "gzip",
        "OVERWRITE": True,
    }
    self.assertEqual(observed, expected)

    # Identity round-trips for other argument shapes.
    self.validate_identity("PUT 'file:///dir/tmp.csv' @s1/test")
    # the unquoted URI variant is not fully supported yet
    self.validate_identity("PUT file:///dir/tmp.csv @%table", check_command_warning=True)
    self.validate_identity(
        "PUT file:///dir/tmp.csv @s1/test PARALLEL=1 AUTO_COMPRESS=FALSE source_compression=gzip OVERWRITE=TRUE",
        check_command_warning=True,
    )
def test_querying_semi_structured_data(self):
self.validate_identity("SELECT $1")
self.validate_identity("SELECT $1.elem")
@ -2450,3 +2504,20 @@ SINGLE = TRUE""",
"trino": "SELECT 1 ORDER BY 1 OFFSET 0",
},
)
def test_listagg(self):
    """LISTAGG identity and cross-dialect transpilation for the Snowflake dialect."""
    self.validate_identity("LISTAGG(data['some_field'], ',')")

    # Exercise both the plain and the DISTINCT form of the aggregate; the
    # generated SQL strings are identical to the originals, only hoisted
    # into named locals to avoid repeating the f-strings.
    for qualifier in ("", "DISTINCT "):
        within_group_sql = (
            f"SELECT LISTAGG({qualifier}col, '|SEPARATOR|') WITHIN GROUP (ORDER BY col2) FROM t"
        )
        order_by_arg_sql = f"SELECT LISTAGG({qualifier}col, '|SEPARATOR|' ORDER BY col2) FROM t"
        self.validate_all(
            within_group_sql,
            read={
                "trino": within_group_sql,
                "duckdb": order_by_arg_sql,
            },
            write={
                "snowflake": within_group_sql,
                "trino": within_group_sql,
                "duckdb": order_by_arg_sql,
            },
        )