Adding upstream version 10.1.3.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent 87cdb8246e
commit b7601057ad
57 changed files with 1542 additions and 529 deletions
@@ -10,6 +10,10 @@ class TestClickhouse(Validator):
        self.validate_identity("SELECT * FROM x AS y FINAL")
        self.validate_identity("'a' IN mapKeys(map('a', 1, 'b', 2))")
        self.validate_identity("CAST((1, 2) AS Tuple(a Int8, b Int16))")
        self.validate_identity("SELECT * FROM foo LEFT ANY JOIN bla")
        self.validate_identity("SELECT * FROM foo LEFT ASOF JOIN bla")
        self.validate_identity("SELECT * FROM foo ASOF JOIN bla")
        self.validate_identity("SELECT * FROM foo ANY JOIN bla")

        self.validate_all(
            "SELECT fname, lname, age FROM person ORDER BY age DESC NULLS FIRST, fname ASC NULLS LAST, lname",

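The validate_identity checks above are round trips: the statement is parsed and regenerated in the ClickHouse dialect and must come back unchanged. Roughly the same check through sqlglot's public transpile() API, as a minimal sketch (assuming sqlglot 10.x is importable; the SQL is copied from the tests above):

import sqlglot

# Parse ClickHouse SQL and regenerate it in the same dialect; identities should round-trip unchanged.
sql = "SELECT * FROM foo LEFT ASOF JOIN bla"
assert sqlglot.transpile(sql, read="clickhouse", write="clickhouse")[0] == sql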
@@ -997,6 +997,13 @@ class TestDialect(Validator):
                "spark": "CONCAT_WS('-', x)",
            },
        )
        self.validate_all(
            "CONCAT(a)",
            write={
                "mysql": "a",
                "tsql": "a",
            },
        )
        self.validate_all(
            "IF(x > 1, 1, 0)",
            write={

@@ -1263,8 +1270,8 @@ class TestDialect(Validator):
        self.validate_all(
            """/* comment1 */
SELECT
-    x, -- comment2
-    y -- comment3""",
+    x, /* comment2 */
+    y /* comment3 */""",
            read={
                "mysql": """SELECT # comment1
    x, # comment2

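validate_all parses its first argument in the base dialect and asserts the string produced by each writer listed in write=. The single-argument CONCAT case above, as a small sketch (assuming sqlglot 10.x; the expected output is copied from the test):

import sqlglot

# A single-argument CONCAT collapses to its argument when written for MySQL (per the test above).
print(sqlglot.transpile("CONCAT(a)", write="mysql")[0])  # -> a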
@@ -89,6 +89,8 @@ class TestDuckDB(Validator):
                "presto": "CAST(COL AS ARRAY(BIGINT))",
                "hive": "CAST(COL AS ARRAY<BIGINT>)",
                "spark": "CAST(COL AS ARRAY<LONG>)",
                "postgres": "CAST(COL AS BIGINT[])",
                "snowflake": "CAST(COL AS ARRAY)",
            },
        )

@@ -104,6 +106,10 @@ class TestDuckDB(Validator):
                "spark": "ARRAY(0, 1, 2)",
            },
        )
        self.validate_all(
            "SELECT ARRAY_LENGTH([0], 1) AS x",
            write={"duckdb": "SELECT ARRAY_LENGTH(LIST_VALUE(0), 1) AS x"},
        )
        self.validate_all(
            "REGEXP_MATCHES(x, y)",
            write={

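The new ARRAY_LENGTH case checks that DuckDB's bracketed list literal is normalized to LIST_VALUE on output. The same transformation via transpile(), as a sketch (assuming sqlglot 10.x; the expected string is copied from the test):

import sqlglot

# DuckDB's [0] literal is regenerated as LIST_VALUE(0) (per the test above).
print(sqlglot.transpile("SELECT ARRAY_LENGTH([0], 1) AS x", read="duckdb", write="duckdb")[0])
# -> SELECT ARRAY_LENGTH(LIST_VALUE(0), 1) AS x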
@@ -139,7 +139,7 @@ class TestHive(Validator):
            "CREATE TABLE test STORED AS parquet TBLPROPERTIES ('x'='1', 'Z'='2') AS SELECT 1",
            write={
                "duckdb": "CREATE TABLE test AS SELECT 1",
-               "presto": "CREATE TABLE test WITH (FORMAT='parquet', x='1', Z='2') AS SELECT 1",
+               "presto": "CREATE TABLE test WITH (FORMAT='PARQUET', x='1', Z='2') AS SELECT 1",
                "hive": "CREATE TABLE test STORED AS PARQUET TBLPROPERTIES ('x'='1', 'Z'='2') AS SELECT 1",
                "spark": "CREATE TABLE test USING PARQUET TBLPROPERTIES ('x'='1', 'Z'='2') AS SELECT 1",
            },

@@ -459,6 +459,7 @@ class TestHive(Validator):
                "hive": "MAP(a, b, c, d)",
                "presto": "MAP(ARRAY[a, c], ARRAY[b, d])",
                "spark": "MAP(a, b, c, d)",
                "snowflake": "OBJECT_CONSTRUCT(a, b, c, d)",
            },
            write={
                "": "MAP(ARRAY(a, c), ARRAY(b, d))",

@@ -467,6 +468,7 @@ class TestHive(Validator):
                "presto": "MAP(ARRAY[a, c], ARRAY[b, d])",
                "hive": "MAP(a, b, c, d)",
                "spark": "MAP(a, b, c, d)",
                "snowflake": "OBJECT_CONSTRUCT(a, b, c, d)",
            },
        )
        self.validate_all(

@@ -476,6 +478,7 @@ class TestHive(Validator):
                "presto": "MAP(ARRAY[a], ARRAY[b])",
                "hive": "MAP(a, b)",
                "spark": "MAP(a, b)",
                "snowflake": "OBJECT_CONSTRUCT(a, b)",
            },
        )
        self.validate_all(

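The MAP cases above exercise cross-dialect map construction: Hive's flat MAP(k1, v1, k2, v2) corresponds to Presto's two-array form, and these hunks add Snowflake's OBJECT_CONSTRUCT expectation. A sketch of the same rewrite with transpile() (assuming sqlglot 10.x; the expected string is copied from the tests):

import sqlglot

# Hive's variadic MAP becomes Presto's MAP(ARRAY[keys], ARRAY[values]) (per the tests above).
print(sqlglot.transpile("MAP(a, b, c, d)", read="hive", write="presto")[0])
# -> MAP(ARRAY[a, c], ARRAY[b, d])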
@@ -23,6 +23,8 @@ class TestMySQL(Validator):
        self.validate_identity("SELECT TRIM('bla' FROM ' XXX ')")
        self.validate_identity("@@GLOBAL.max_connections")

        self.validate_identity("CREATE TABLE A LIKE B")

        # SET Commands
        self.validate_identity("SET @var_name = expr")
        self.validate_identity("SET @name = 43")

@@ -177,14 +179,27 @@ class TestMySQL(Validator):
            "GROUP_CONCAT(DISTINCT x ORDER BY y DESC)",
            write={
                "mysql": "GROUP_CONCAT(DISTINCT x ORDER BY y DESC SEPARATOR ',')",
-               "sqlite": "GROUP_CONCAT(DISTINCT x ORDER BY y DESC)",
+               "sqlite": "GROUP_CONCAT(DISTINCT x)",
                "tsql": "STRING_AGG(x, ',') WITHIN GROUP (ORDER BY y DESC)",
                "postgres": "STRING_AGG(DISTINCT x, ',' ORDER BY y DESC NULLS LAST)",
            },
        )
        self.validate_all(
            "GROUP_CONCAT(x ORDER BY y SEPARATOR z)",
            write={
                "mysql": "GROUP_CONCAT(x ORDER BY y SEPARATOR z)",
                "sqlite": "GROUP_CONCAT(x, z)",
                "tsql": "STRING_AGG(x, z) WITHIN GROUP (ORDER BY y)",
                "postgres": "STRING_AGG(x, z ORDER BY y NULLS FIRST)",
            },
        )
        self.validate_all(
            "GROUP_CONCAT(DISTINCT x ORDER BY y DESC SEPARATOR '')",
            write={
                "mysql": "GROUP_CONCAT(DISTINCT x ORDER BY y DESC SEPARATOR '')",
-               "sqlite": "GROUP_CONCAT(DISTINCT x ORDER BY y DESC, '')",
+               "sqlite": "GROUP_CONCAT(DISTINCT x, '')",
                "tsql": "STRING_AGG(x, '') WITHIN GROUP (ORDER BY y DESC)",
                "postgres": "STRING_AGG(DISTINCT x, '' ORDER BY y DESC NULLS LAST)",
            },
        )
        self.validate_identity(

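These GROUP_CONCAT cases check the mapping onto each target dialect's string-aggregation syntax (STRING_AGG ... WITHIN GROUP for T-SQL, STRING_AGG with ORDER BY for Postgres, a plain GROUP_CONCAT for SQLite). The equivalent transpile() calls, as a sketch (assuming sqlglot 10.x; the expected strings are copied from the tests above):

import sqlglot

sql = "GROUP_CONCAT(DISTINCT x ORDER BY y DESC)"

# MySQL -> T-SQL (per the test above): STRING_AGG(x, ',') WITHIN GROUP (ORDER BY y DESC)
print(sqlglot.transpile(sql, read="mysql", write="tsql")[0])

# MySQL -> Postgres (per the test above): STRING_AGG(DISTINCT x, ',' ORDER BY y DESC NULLS LAST)
print(sqlglot.transpile(sql, read="mysql", write="postgres")[0])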
@@ -6,6 +6,9 @@ class TestPostgres(Validator):
    dialect = "postgres"

    def test_ddl(self):
        self.validate_identity("CREATE TABLE test (foo HSTORE)")
        self.validate_identity("CREATE TABLE test (foo JSONB)")
        self.validate_identity("CREATE TABLE test (foo VARCHAR(64)[])")
        self.validate_all(
            "CREATE TABLE products (product_no INT UNIQUE, name TEXT, price DECIMAL)",
            write={

@@ -60,6 +63,12 @@ class TestPostgres(Validator):
        )

    def test_postgres(self):
        self.validate_identity("SELECT ARRAY[1, 2, 3]")
        self.validate_identity("SELECT ARRAY_LENGTH(ARRAY[1, 2, 3], 1)")
        self.validate_identity("STRING_AGG(x, y)")
        self.validate_identity("STRING_AGG(x, ',' ORDER BY y)")
        self.validate_identity("STRING_AGG(x, ',' ORDER BY y DESC)")
        self.validate_identity("STRING_AGG(DISTINCT x, ',' ORDER BY y DESC)")
        self.validate_identity("SELECT CASE WHEN SUBSTRING('abcdefg') IN ('ab') THEN 1 ELSE 0 END")
        self.validate_identity(
            "SELECT CASE WHEN SUBSTRING('abcdefg' FROM 1) IN ('ab') THEN 1 ELSE 0 END"

@@ -86,6 +95,14 @@ class TestPostgres(Validator):
        self.validate_identity("SELECT e'\\xDEADBEEF'")
        self.validate_identity("SELECT CAST(e'\\176' AS BYTEA)")

        self.validate_all(
            "END WORK AND NO CHAIN",
            write={"postgres": "COMMIT AND NO CHAIN"},
        )
        self.validate_all(
            "END AND CHAIN",
            write={"postgres": "COMMIT AND CHAIN"},
        )
        self.validate_all(
            "CREATE TABLE x (a UUID, b BYTEA)",
            write={

@@ -95,6 +112,10 @@ class TestPostgres(Validator):
                "spark": "CREATE TABLE x (a UUID, b BINARY)",
            },
        )

        self.validate_identity(
            "CREATE TABLE A (LIKE B INCLUDING CONSTRAINT INCLUDING COMPRESSION EXCLUDING COMMENTS)"
        )
        self.validate_all(
            "SELECT SUM(x) OVER (PARTITION BY a ORDER BY d ROWS 1 PRECEDING)",
            write={

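The new END cases check that Postgres' END synonym for COMMIT is normalized on output. Roughly the same check via transpile(), as a sketch (assuming sqlglot 10.x; the expected string is copied from the test):

import sqlglot

# Postgres accepts END [WORK] as a synonym for COMMIT; sqlglot regenerates the canonical form (per the test above).
print(sqlglot.transpile("END WORK AND NO CHAIN", read="postgres", write="postgres")[0])
# -> COMMIT AND NO CHAIN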
@@ -13,6 +13,7 @@ class TestPresto(Validator):
                "duckdb": "CAST(a AS INT[])",
                "presto": "CAST(a AS ARRAY(INTEGER))",
                "spark": "CAST(a AS ARRAY<INT>)",
                "snowflake": "CAST(a AS ARRAY)",
            },
        )
        self.validate_all(

@@ -31,6 +32,7 @@ class TestPresto(Validator):
                "duckdb": "CAST(LIST_VALUE(1, 2) AS BIGINT[])",
                "presto": "CAST(ARRAY[1, 2] AS ARRAY(BIGINT))",
                "spark": "CAST(ARRAY(1, 2) AS ARRAY<LONG>)",
                "snowflake": "CAST([1, 2] AS ARRAY)",
            },
        )
        self.validate_all(

@@ -41,6 +43,7 @@ class TestPresto(Validator):
                "presto": "CAST(MAP(ARRAY[1], ARRAY[1]) AS MAP(INTEGER, INTEGER))",
                "hive": "CAST(MAP(1, 1) AS MAP<INT, INT>)",
                "spark": "CAST(MAP_FROM_ARRAYS(ARRAY(1), ARRAY(1)) AS MAP<INT, INT>)",
                "snowflake": "CAST(OBJECT_CONSTRUCT(1, 1) AS OBJECT)",
            },
        )
        self.validate_all(

@@ -51,6 +54,7 @@ class TestPresto(Validator):
                "presto": "CAST(MAP(ARRAY['a', 'b', 'c'], ARRAY[ARRAY[1], ARRAY[2], ARRAY[3]]) AS MAP(VARCHAR, ARRAY(INTEGER)))",
                "hive": "CAST(MAP('a', ARRAY(1), 'b', ARRAY(2), 'c', ARRAY(3)) AS MAP<STRING, ARRAY<INT>>)",
                "spark": "CAST(MAP_FROM_ARRAYS(ARRAY('a', 'b', 'c'), ARRAY(ARRAY(1), ARRAY(2), ARRAY(3))) AS MAP<STRING, ARRAY<INT>>)",
                "snowflake": "CAST(OBJECT_CONSTRUCT('a', [1], 'b', [2], 'c', [3]) AS OBJECT)",
            },
        )
        self.validate_all(

@@ -393,6 +397,7 @@ class TestPresto(Validator):
            write={
                "hive": UnsupportedError,
                "spark": "MAP_FROM_ARRAYS(a, b)",
                "snowflake": UnsupportedError,
            },
        )
        self.validate_all(

@@ -401,6 +406,7 @@ class TestPresto(Validator):
                "hive": "MAP(a, c, b, d)",
                "presto": "MAP(ARRAY[a, b], ARRAY[c, d])",
                "spark": "MAP_FROM_ARRAYS(ARRAY(a, b), ARRAY(c, d))",
                "snowflake": "OBJECT_CONSTRUCT(a, c, b, d)",
            },
        )
        self.validate_all(

@@ -409,6 +415,7 @@ class TestPresto(Validator):
                "hive": "MAP('a', 'b')",
                "presto": "MAP(ARRAY['a'], ARRAY['b'])",
                "spark": "MAP_FROM_ARRAYS(ARRAY('a'), ARRAY('b'))",
                "snowflake": "OBJECT_CONSTRUCT('a', 'b')",
            },
        )
        self.validate_all(

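These hunks add Snowflake expectations to Presto's array and map cast cases. The underlying cross-dialect cast rewrite can be reproduced with transpile(), as a sketch (assuming sqlglot 10.x; the expected string is copied from the tests above):

import sqlglot

# Presto's two-array MAP cast is rewritten into Hive's typed MAP syntax (per the tests above).
sql = "CAST(MAP(ARRAY[1], ARRAY[1]) AS MAP(INTEGER, INTEGER))"
print(sqlglot.transpile(sql, read="presto", write="hive")[0])
# -> CAST(MAP(1, 1) AS MAP<INT, INT>)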
@@ -50,6 +50,12 @@ class TestRedshift(Validator):
                "redshift": 'SELECT tablename, "column" FROM pg_table_def WHERE "column" LIKE \'%start\\\\_%\' LIMIT 5'
            },
        )
        self.validate_all(
            "SELECT DISTINCT ON (a) a, b FROM x ORDER BY c DESC",
            write={
                "redshift": 'SELECT a, b FROM (SELECT a, b, ROW_NUMBER() OVER (PARTITION BY a ORDER BY c DESC) AS "_row_number" FROM x) WHERE "_row_number" = 1',
            },
        )

    def test_identity(self):
        self.validate_identity("CAST('bla' AS SUPER)")

@@ -64,3 +70,13 @@ class TestRedshift(Validator):
        self.validate_identity(
            "SELECT COUNT(*) FROM event WHERE eventname LIKE '%Ring%' OR eventname LIKE '%Die%'"
        )
        self.validate_identity("CREATE TABLE SOUP DISTKEY(soup1) SORTKEY(soup2) DISTSTYLE AUTO")
        self.validate_identity(
            "CREATE TABLE sales (salesid INTEGER NOT NULL) DISTKEY(listid) COMPOUND SORTKEY(listid, sellerid)"
        )
        self.validate_identity(
            "COPY customer FROM 's3://mybucket/customer' IAM_ROLE 'arn:aws:iam::0123456789012:role/MyRedshiftRole'"
        )
        self.validate_identity(
            "UNLOAD ('select * from venue') TO 's3://mybucket/unload/' IAM_ROLE 'arn:aws:iam::0123456789012:role/MyRedshiftRole'"
        )

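The new DISTINCT ON case checks that Redshift output falls back to a ROW_NUMBER() subquery, since Redshift has no DISTINCT ON. A sketch of the same rewrite through transpile() (assuming sqlglot 10.x; the read dialect mirrors the test class's own dialect):

import sqlglot

# DISTINCT ON is emulated with ROW_NUMBER() OVER (PARTITION BY a ORDER BY c DESC) in a subquery
# when targeting Redshift (per the test above).
sql = "SELECT DISTINCT ON (a) a, b FROM x ORDER BY c DESC"
print(sqlglot.transpile(sql, read="redshift", write="redshift")[0])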
@@ -172,13 +172,28 @@ class TestSnowflake(Validator):
        self.validate_all(
            "trim(date_column, 'UTC')",
            write={
                "bigquery": "TRIM(date_column, 'UTC')",
                "snowflake": "TRIM(date_column, 'UTC')",
                "postgres": "TRIM('UTC' FROM date_column)",
            },
        )
        self.validate_all(
            "trim(date_column)",
-           write={"snowflake": "TRIM(date_column)"},
+           write={
+               "snowflake": "TRIM(date_column)",
+               "bigquery": "TRIM(date_column)",
+           },
        )
        self.validate_all(
            "DECODE(x, a, b, c, d)",
            read={
                "": "MATCHES(x, a, b, c, d)",
            },
            write={
                "": "MATCHES(x, a, b, c, d)",
                "oracle": "DECODE(x, a, b, c, d)",
                "snowflake": "DECODE(x, a, b, c, d)",
            },
        )

    def test_null_treatment(self):

@@ -370,7 +385,8 @@ class TestSnowflake(Validator):
        )

        self.validate_all(
-           r"""SELECT * FROM TABLE(?)""", write={"snowflake": r"""SELECT * FROM TABLE(?)"""}
+           r"""SELECT * FROM TABLE(?)""",
+           write={"snowflake": r"""SELECT * FROM TABLE(?)"""},
        )

        self.validate_all(

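The new DECODE block ties Snowflake's DECODE to the generic MATCHES expression in both directions. A sketch with transpile() (assuming sqlglot 10.x; the expected strings are copied from the test above):

import sqlglot

# Snowflake DECODE is preserved when writing Oracle (per the test above).
print(sqlglot.transpile("DECODE(x, a, b, c, d)", read="snowflake", write="oracle")[0])
# -> DECODE(x, a, b, c, d)

# The generic MATCHES form, parsed in the base dialect, is written as DECODE for Snowflake (per the test above).
print(sqlglot.transpile("MATCHES(x, a, b, c, d)", write="snowflake")[0])
# -> DECODE(x, a, b, c, d)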
@@ -32,13 +32,14 @@ class TestSpark(Validator):
                "presto": "CREATE TABLE db.example_table (col_a ARRAY(INTEGER), col_b ARRAY(ARRAY(INTEGER)))",
                "hive": "CREATE TABLE db.example_table (col_a ARRAY<INT>, col_b ARRAY<ARRAY<INT>>)",
                "spark": "CREATE TABLE db.example_table (col_a ARRAY<INT>, col_b ARRAY<ARRAY<INT>>)",
                "snowflake": "CREATE TABLE db.example_table (col_a ARRAY, col_b ARRAY)",
            },
        )
        self.validate_all(
            "CREATE TABLE x USING ICEBERG PARTITIONED BY (MONTHS(y)) LOCATION 's3://z'",
            write={
                "duckdb": "CREATE TABLE x",
-               "presto": "CREATE TABLE x WITH (TABLE_FORMAT = 'ICEBERG', PARTITIONED_BY=ARRAY['MONTHS'])",
+               "presto": "CREATE TABLE x WITH (TABLE_FORMAT='ICEBERG', PARTITIONED_BY=ARRAY['MONTHS'])",
                "hive": "CREATE TABLE x USING ICEBERG PARTITIONED BY (MONTHS(y)) LOCATION 's3://z'",
                "spark": "CREATE TABLE x USING ICEBERG PARTITIONED BY (MONTHS(y)) LOCATION 's3://z'",
            },

@@ -94,6 +95,13 @@ TBLPROPERTIES (
            pretty=True,
        )

        self.validate_all(
            "CACHE TABLE testCache OPTIONS ('storageLevel' 'DISK_ONLY') SELECT * FROM testData",
            write={
                "spark": "CACHE TABLE testCache OPTIONS('storageLevel' = 'DISK_ONLY') AS SELECT * FROM testData"
            },
        )

    def test_to_date(self):
        self.validate_all(
            "TO_DATE(x, 'yyyy-MM-dd')",

@@ -271,6 +279,7 @@ TBLPROPERTIES (
                "presto": "MAP(ARRAY[1], c)",
                "hive": "MAP(ARRAY(1), c)",
                "spark": "MAP_FROM_ARRAYS(ARRAY(1), c)",
                "snowflake": "OBJECT_CONSTRUCT([1], c)",
            },
        )
        self.validate_all(

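The new CACHE TABLE case checks that Spark's OPTIONS clause is normalized (key = value pairs, AS before the query). A sketch of the same round trip via transpile() (assuming sqlglot 10.x; the expected string is copied from the test):

import sqlglot

# Spark's CACHE TABLE ... OPTIONS is regenerated in canonical form (per the test above).
sql = "CACHE TABLE testCache OPTIONS ('storageLevel' 'DISK_ONLY') SELECT * FROM testData"
print(sqlglot.transpile(sql, read="spark", write="spark")[0])
# -> CACHE TABLE testCache OPTIONS('storageLevel' = 'DISK_ONLY') AS SELECT * FROM testData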
@@ -5,6 +5,10 @@ class TestSQLite(Validator):
    dialect = "sqlite"

    def test_ddl(self):
        self.validate_all(
            "CREATE TABLE foo (id INTEGER PRIMARY KEY ASC)",
            write={"sqlite": "CREATE TABLE foo (id INTEGER PRIMARY KEY ASC)"},
        )
        self.validate_all(
            """
            CREATE TABLE "Track"

@@ -17,7 +17,6 @@ class TestTSQL(Validator):
                "spark": "SELECT CAST(`a`.`b` AS SHORT) FROM foo",
            },
        )

        self.validate_all(
            "CONVERT(INT, CONVERT(NUMERIC, '444.75'))",
            write={

@@ -25,6 +24,33 @@ class TestTSQL(Validator):
                "tsql": "CAST(CAST('444.75' AS NUMERIC) AS INTEGER)",
            },
        )
        self.validate_all(
            "STRING_AGG(x, y) WITHIN GROUP (ORDER BY z DESC)",
            write={
                "tsql": "STRING_AGG(x, y) WITHIN GROUP (ORDER BY z DESC)",
                "mysql": "GROUP_CONCAT(x ORDER BY z DESC SEPARATOR y)",
                "sqlite": "GROUP_CONCAT(x, y)",
                "postgres": "STRING_AGG(x, y ORDER BY z DESC NULLS LAST)",
            },
        )
        self.validate_all(
            "STRING_AGG(x, '|') WITHIN GROUP (ORDER BY z ASC)",
            write={
                "tsql": "STRING_AGG(x, '|') WITHIN GROUP (ORDER BY z)",
                "mysql": "GROUP_CONCAT(x ORDER BY z SEPARATOR '|')",
                "sqlite": "GROUP_CONCAT(x, '|')",
                "postgres": "STRING_AGG(x, '|' ORDER BY z NULLS FIRST)",
            },
        )
        self.validate_all(
            "STRING_AGG(x, '|')",
            write={
                "tsql": "STRING_AGG(x, '|')",
                "mysql": "GROUP_CONCAT(x SEPARATOR '|')",
                "sqlite": "GROUP_CONCAT(x, '|')",
                "postgres": "STRING_AGG(x, '|')",
            },
        )

    def test_types(self):
        self.validate_identity("CAST(x AS XML)")

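The new STRING_AGG cases mirror the MySQL GROUP_CONCAT block earlier in this diff, going in the other direction. A sketch via transpile() (assuming sqlglot 10.x; the expected string is copied from the tests above):

import sqlglot

# T-SQL STRING_AGG ... WITHIN GROUP maps back to MySQL GROUP_CONCAT with SEPARATOR (per the tests above).
sql = "STRING_AGG(x, '|') WITHIN GROUP (ORDER BY z ASC)"
print(sqlglot.transpile(sql, read="tsql", write="mysql")[0])
# -> GROUP_CONCAT(x ORDER BY z SEPARATOR '|')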