Adding upstream version 10.6.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>

parent 26f20be569 · commit fafa4a7b95
36 changed files with 1007 additions and 270 deletions
@@ -102,3 +102,18 @@ class TestDatabricks(Validator):
                 "databricks": "SELECT DATEADD(DAY, 1, '2020-01-01')",
             },
         )
+
+    def test_without_as(self):
+        self.validate_all(
+            "CREATE TABLE x (SELECT 1)",
+            write={
+                "databricks": "CREATE TABLE x AS (SELECT 1)",
+            },
+        )
+
+        self.validate_all(
+            "WITH x (select 1) SELECT * FROM x",
+            write={
+                "databricks": "WITH x AS (SELECT 1) SELECT * FROM x",
+            },
+        )
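The new test asserts that Databricks requires AS before the subquery of a CTAS. The same behavior is reachable through sqlglot's public transpile helper; a minimal sketch, with the expected output copied from the test above:

import sqlglot

# The missing AS keyword is inserted when writing for Databricks.
print(sqlglot.transpile("CREATE TABLE x (SELECT 1)", read="databricks", write="databricks")[0])
# CREATE TABLE x AS (SELECT 1)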
@@ -9,9 +9,9 @@ class Validator(unittest.TestCase):
     def parse_one(self, sql):
         return parse_one(sql, read=self.dialect)

-    def validate_identity(self, sql, write_sql=None):
+    def validate_identity(self, sql, write_sql=None, pretty=False):
         expression = self.parse_one(sql)
-        self.assertEqual(write_sql or sql, expression.sql(dialect=self.dialect))
+        self.assertEqual(write_sql or sql, expression.sql(dialect=self.dialect, pretty=pretty))
         return expression

     def validate_all(self, sql, read=None, write=None, pretty=False, identify=False):
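validate_identity now forwards a pretty flag to Expression.sql, so identity tests can assert against pretty-printed output. A minimal sketch of the underlying call (the exact formatting depends on the generator, so the output is indicative only):

import sqlglot

expression = sqlglot.parse_one("SELECT a, b FROM x WHERE a > 1")
# pretty=True emits the multi-line, indented form instead of one line.
print(expression.sql(pretty=True))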
@@ -75,6 +75,19 @@ class TestDuckDB(Validator):
     )

     def test_duckdb(self):
+        self.validate_identity("SELECT {'a': 1} AS x")
+        self.validate_identity("SELECT {'a': {'b': {'c': 1}}, 'd': {'e': 2}} AS x")
+        self.validate_identity("SELECT {'x': 1, 'y': 2, 'z': 3}")
+        self.validate_identity(
+            "SELECT {'yes': 'duck', 'maybe': 'goose', 'huh': NULL, 'no': 'heron'}"
+        )
+        self.validate_identity("SELECT {'key1': 'string', 'key2': 1, 'key3': 12.345}")
+        self.validate_identity("SELECT ROW(x, x + 1, y) FROM (SELECT 1 AS x, 'a' AS y)")
+        self.validate_identity("SELECT (x, x + 1, y) FROM (SELECT 1 AS x, 'a' AS y)")
+        self.validate_identity("SELECT a.x FROM (SELECT {'x': 1, 'y': 2, 'z': 3} AS a)")
+        self.validate_identity(
+            "SELECT a['x space'] FROM (SELECT {'x space': 1, 'y': 2, 'z': 3} AS a)"
+        )
         self.validate_all(
             "CREATE TABLE IF NOT EXISTS table (cola INT, colb STRING) USING ICEBERG PARTITIONED BY (colb)",
             write={
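These identities cover DuckDB's {'key': value} struct literal syntax, including nesting and bracket access with quoted keys. A minimal round-trip sketch, assuming the dialect name "duckdb":

import sqlglot

# A struct literal should survive a duckdb -> duckdb round trip unchanged.
sql = "SELECT {'a': {'b': {'c': 1}}, 'd': {'e': 2}} AS x"
assert sqlglot.transpile(sql, read="duckdb", write="duckdb")[0] == sql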
@@ -229,10 +242,17 @@ class TestDuckDB(Validator):
         self.validate_all(
             "STRUCT_PACK(x := 1, y := '2')",
             write={
-                "duckdb": "STRUCT_PACK(x := 1, y := '2')",
+                "duckdb": "{'x': 1, 'y': '2'}",
                 "spark": "STRUCT(x = 1, y = '2')",
             },
         )
+        self.validate_all(
+            "STRUCT_PACK(key1 := 'value1', key2 := 42)",
+            write={
+                "duckdb": "{'key1': 'value1', 'key2': 42}",
+                "spark": "STRUCT(key1 = 'value1', key2 = 42)",
+            },
+        )
         self.validate_all(
             "ARRAY_SORT(x)",
             write={
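The changed line means STRUCT_PACK no longer round-trips verbatim: the DuckDB writer now normalizes it to a struct literal, while Spark keeps its STRUCT call. A sketch, with expected strings copied from the test:

import sqlglot

expression = sqlglot.parse_one("STRUCT_PACK(x := 1, y := '2')", read="duckdb")
print(expression.sql(dialect="duckdb"))  # {'x': 1, 'y': '2'}
print(expression.sql(dialect="spark"))   # STRUCT(x = 1, y = '2')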
@@ -338,6 +338,27 @@ class TestHive(Validator):
     )

     def test_hive(self):
+        self.validate_identity(
+            "INSERT OVERWRITE TABLE zipcodes PARTITION(state = '0') VALUES (896, 'US', 'TAMPA', 33607)"
+        )
+        self.validate_identity(
+            "INSERT OVERWRITE TABLE zipcodes PARTITION(state = 0) VALUES (896, 'US', 'TAMPA', 33607)"
+        )
+        self.validate_identity(
+            "SELECT a, b, SUM(c) FROM tabl AS t GROUP BY a, b GROUPING SETS ((a, b), a)"
+        )
+        self.validate_identity(
+            "SELECT a, b, SUM(c) FROM tabl AS t GROUP BY a, b GROUPING SETS ((t.a, b), a)"
+        )
+        self.validate_identity(
+            "SELECT a, b, SUM(c) FROM tabl AS t GROUP BY a, FOO(b) GROUPING SETS ((a, FOO(b)), a)"
+        )
+        self.validate_identity(
+            "SELECT key, value, GROUPING__ID, COUNT(*) FROM T1 GROUP BY key, value WITH CUBE"
+        )
+        self.validate_identity(
+            "SELECT key, value, GROUPING__ID, COUNT(*) FROM T1 GROUP BY key, value WITH ROLLUP"
+        )
         self.validate_all(
             "SELECT A.1a AS b FROM test_a AS A",
             write={
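The new identities exercise Hive's INSERT OVERWRITE ... PARTITION syntax, GROUPING SETS, and the WITH CUBE / WITH ROLLUP modifiers. A minimal round-trip sketch:

import sqlglot

for sql in (
    "SELECT a, b, SUM(c) FROM tabl AS t GROUP BY a, b GROUPING SETS ((a, b), a)",
    "SELECT key, value, GROUPING__ID, COUNT(*) FROM T1 GROUP BY key, value WITH CUBE",
):
    # Each statement should be reproduced exactly by the hive writer.
    assert sqlglot.transpile(sql, read="hive", write="hive")[0] == sql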
@@ -615,3 +636,20 @@ class TestHive(Validator):
                 "spark": "SELECT * FROM x TABLESAMPLE(1) AS foo",
             },
         )
+        self.validate_all(
+            "SELECT * FROM x TABLESAMPLE(1) AS foo",
+            read={
+                "presto": "SELECT * FROM x AS foo TABLESAMPLE(1)",
+            },
+            write={
+                "presto": "SELECT * FROM x AS foo TABLESAMPLE(1)",
+                "hive": "SELECT * FROM x TABLESAMPLE(1) AS foo",
+                "spark": "SELECT * FROM x TABLESAMPLE(1) AS foo",
+            },
+        )
+        self.validate_all(
+            "SELECT a, SUM(c) FROM t GROUP BY a, DATE_FORMAT(b, 'yyyy') GROUPING SETS ((a, DATE_FORMAT(b, 'yyyy')), a)",
+            write={
+                "hive": "SELECT a, SUM(c) FROM t GROUP BY a, DATE_FORMAT(CAST(b AS TIMESTAMP), 'yyyy') GROUPING SETS ((a, DATE_FORMAT(CAST(b AS TIMESTAMP), 'yyyy')), a)",
+            },
+        )
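The TABLESAMPLE case pins down alias placement: Presto renders the sample after the alias, Hive and Spark before it. A sketch of the same conversion, expected strings copied from the test:

import sqlglot

expression = sqlglot.parse_one("SELECT * FROM x TABLESAMPLE(1) AS foo", read="hive")
print(expression.sql(dialect="presto"))  # SELECT * FROM x AS foo TABLESAMPLE(1)
print(expression.sql(dialect="hive"))    # SELECT * FROM x TABLESAMPLE(1) AS foo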
@@ -56,7 +56,22 @@ class TestPostgres(Validator):
     )

     def test_postgres(self):
+        self.validate_all(
+            "x ^ y",
+            write={
+                "": "POWER(x, y)",
+                "postgres": "x ^ y",
+            },
+        )
+        self.validate_all(
+            "x # y",
+            write={
+                "": "x ^ y",
+                "postgres": "x # y",
+            },
+        )
         self.validate_identity("SELECT ARRAY[1, 2, 3]")
+        self.validate_identity("SELECT ARRAY(SELECT 1)")
         self.validate_identity("SELECT ARRAY_LENGTH(ARRAY[1, 2, 3], 1)")
         self.validate_identity("STRING_AGG(x, y)")
         self.validate_identity("STRING_AGG(x, ',' ORDER BY y)")
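In Postgres, ^ is exponentiation and # is bitwise XOR, so both operators change spelling when leaving the dialect; the "" key in these tests denotes sqlglot's default dialect. A sketch of the mapping (calling .sql() with no dialect writes the default dialect):

import sqlglot

power = sqlglot.parse_one("x ^ y", read="postgres")
print(power.sql())                    # POWER(x, y)
print(power.sql(dialect="postgres"))  # x ^ y

xor = sqlglot.parse_one("x # y", read="postgres")
print(xor.sql())                      # x ^ y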
@@ -88,6 +103,14 @@ class TestPostgres(Validator):
         self.validate_identity("SELECT e'\\xDEADBEEF'")
         self.validate_identity("SELECT CAST(e'\\176' AS BYTEA)")
         self.validate_identity("""SELECT * FROM JSON_TO_RECORDSET(z) AS y("rank" INT)""")
+        self.validate_identity(
+            "SELECT SUM(x) OVER a, SUM(y) OVER b FROM c WINDOW a AS (PARTITION BY d), b AS (PARTITION BY e)"
+        )
+        self.validate_identity(
+            "CREATE TABLE A (LIKE B INCLUDING CONSTRAINT INCLUDING COMPRESSION EXCLUDING COMMENTS)"
+        )
+        self.validate_identity("x ~ 'y'")
+        self.validate_identity("x ~* 'y'")

         self.validate_all(
             "END WORK AND NO CHAIN",
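Of the identities added here, the named WINDOW clause is the most involved. A minimal round-trip sketch:

import sqlglot

sql = (
    "SELECT SUM(x) OVER a, SUM(y) OVER b FROM c "
    "WINDOW a AS (PARTITION BY d), b AS (PARTITION BY e)"
)
# validate_identity asserts exactly this round trip.
assert sqlglot.transpile(sql, read="postgres", write="postgres")[0] == sql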
@@ -118,10 +141,6 @@ class TestPostgres(Validator):
             "SELECT to_timestamp(123)::time without time zone",
             write={"postgres": "SELECT CAST(TO_TIMESTAMP(123) AS TIME)"},
         )
-
-        self.validate_identity(
-            "CREATE TABLE A (LIKE B INCLUDING CONSTRAINT INCLUDING COMPRESSION EXCLUDING COMMENTS)"
-        )
         self.validate_all(
             "SELECT SUM(x) OVER (PARTITION BY a ORDER BY d ROWS 1 PRECEDING)",
             write={
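The surrounding context shows Postgres :: casts being normalized to standard CAST syntax. A sketch mirroring the test:

import sqlglot

print(sqlglot.transpile(
    "SELECT to_timestamp(123)::time without time zone",
    read="postgres",
    write="postgres",
)[0])
# SELECT CAST(TO_TIMESTAMP(123) AS TIME)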
@@ -283,9 +302,6 @@ class TestPostgres(Validator):
             "UPDATE MYTABLE T1 SET T1.COL = 13",
             write={"postgres": "UPDATE MYTABLE AS T1 SET T1.COL = 13"},
         )
-
-        self.validate_identity("x ~ 'y'")
-        self.validate_identity("x ~* 'y'")
         self.validate_all(
             "x !~ 'y'",
             write={"postgres": "NOT x ~ 'y'"},
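With x ~ 'y' and x ~* 'y' now plain identities (moved into the earlier hunk), the interesting case left here is the negated match, which the postgres writer emits as NOT applied to ~. A sketch:

import sqlglot

print(sqlglot.transpile("x !~ 'y'", read="postgres", write="postgres")[0])
# NOT x ~ 'y'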
@@ -319,13 +335,20 @@ class TestPostgres(Validator):
             "'x' 'y' 'z'",
             write={"postgres": "CONCAT('x', 'y', 'z')"},
         )
-        self.validate_identity("SELECT ARRAY(SELECT 1)")
-
         self.validate_all(
             "x::cstring",
             write={"postgres": "CAST(x AS CSTRING)"},
         )
-
-        self.validate_identity(
-            "SELECT SUM(x) OVER a, SUM(y) OVER b FROM c WINDOW a AS (PARTITION BY d), b AS (PARTITION BY e)"
-        )
+        self.validate_all(
+            "TRIM(BOTH 'as' FROM 'as string as')",
+            write={
+                "postgres": "TRIM(BOTH 'as' FROM 'as string as')",
+                "spark": "TRIM(BOTH 'as' FROM 'as string as')",
+            },
+        )
+
+    def test_bool_or(self):
+        self.validate_all(
+            "SELECT a, LOGICAL_OR(b) FROM table GROUP BY a",
+            write={"postgres": "SELECT a, BOOL_OR(b) FROM table GROUP BY a"},
+        )
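LOGICAL_OR is the spelling used by sqlglot's default dialect; Postgres names the same aggregate BOOL_OR. A sketch of the new test_bool_or mapping (assuming the default parser recognizes LOGICAL_OR, as the test implies):

import sqlglot

print(sqlglot.transpile(
    "SELECT a, LOGICAL_OR(b) FROM table GROUP BY a", write="postgres"
)[0])
# SELECT a, BOOL_OR(b) FROM table GROUP BY a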
@@ -174,6 +174,13 @@ class TestPresto(Validator):
                 "spark": "DATE_ADD(x, 1)",
             },
         )
+        self.validate_all(
+            "NOW()",
+            write={
+                "presto": "CURRENT_TIMESTAMP()",
+                "hive": "CURRENT_TIMESTAMP()",
+            },
+        )

     def test_ddl(self):
         self.validate_all(
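NOW() parses to a current-timestamp node, which both the Presto and Hive writers render as CURRENT_TIMESTAMP(). A sketch:

import sqlglot

print(sqlglot.transpile("NOW()", read="presto", write="hive")[0])
# CURRENT_TIMESTAMP()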
@@ -571,3 +571,35 @@ FROM persons AS p, LATERAL FLATTEN(input => p.c, path => 'contact') AS f, LATERA
                 "spark": "DESCRIBE db.table",
             },
         )
+
+    def test_match_recognize(self):
+        for row in (
+            "ONE ROW PER MATCH",
+            "ALL ROWS PER MATCH",
+            "ALL ROWS PER MATCH SHOW EMPTY MATCHES",
+            "ALL ROWS PER MATCH OMIT EMPTY MATCHES",
+            "ALL ROWS PER MATCH WITH UNMATCHED ROWS",
+        ):
+            for after in (
+                "AFTER MATCH SKIP",
+                "AFTER MATCH SKIP PAST LAST ROW",
+                "AFTER MATCH SKIP TO NEXT ROW",
+                "AFTER MATCH SKIP TO FIRST x",
+                "AFTER MATCH SKIP TO LAST x",
+            ):
+                self.validate_identity(
+                    f"""SELECT
+  *
+FROM x
+MATCH_RECOGNIZE (
+  PARTITION BY a, b
+  ORDER BY
+    x DESC
+  MEASURES y AS b
+  {row}
+  {after}
+  PATTERN (^ S1 S2*? ( {{- S3 -}} S4 )+ | PERMUTE(S1, S2){{1,2}} $)
+  DEFINE x AS y
+)""",
+                    pretty=True,
+                )
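The nested loops assert identity, in pretty mode, for all 25 combinations of ROWS PER MATCH and AFTER MATCH options. One combination as a standalone sketch (whitespace in the input is irrelevant, since the parser normalizes it before pretty-printing):

import sqlglot

sql = (
    "SELECT * FROM x MATCH_RECOGNIZE ("
    "PARTITION BY a, b ORDER BY x DESC MEASURES y AS b "
    "ALL ROWS PER MATCH AFTER MATCH SKIP TO LAST x "
    "PATTERN (^ S1 S2*? ( {- S3 -} S4 )+ | PERMUTE(S1, S2){1,2} $) "
    "DEFINE x AS y)"
)
expression = sqlglot.parse_one(sql, read="snowflake")
print(expression.sql(dialect="snowflake", pretty=True))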
@@ -208,6 +208,13 @@ TBLPROPERTIES (

     def test_spark(self):
         self.validate_identity("SELECT UNIX_TIMESTAMP()")
+        self.validate_identity("TRIM(' SparkSQL ')")
+        self.validate_identity("TRIM(BOTH 'SL' FROM 'SSparkSQLS')")
+        self.validate_identity("TRIM(LEADING 'SL' FROM 'SSparkSQLS')")
+        self.validate_identity("TRIM(TRAILING 'SL' FROM 'SSparkSQLS')")
+        self.validate_all(
+            "TRIM('SL', 'SSparkSQLS')", write={"spark": "TRIM('SL' FROM 'SSparkSQLS')"}
+        )
         self.validate_all(
             "ARRAY_SORT(x, (left, right) -> -1)",
             write={
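Spark's two-argument TRIM is canonicalized to the TRIM(chars FROM string) form on write, while the FROM forms round-trip as identities. A sketch mirroring the test:

import sqlglot

print(sqlglot.transpile("TRIM('SL', 'SSparkSQLS')", read="spark", write="spark")[0])
# TRIM('SL' FROM 'SSparkSQLS')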
@@ -314,5 +321,5 @@ TBLPROPERTIES (
     def test_bool_or(self):
         self.validate_all(
             "SELECT a, LOGICAL_OR(b) FROM table GROUP BY a",
-            write={"duckdb": "SELECT a, BOOL_OR(b) FROM table GROUP BY a"},
+            write={"spark": "SELECT a, BOOL_OR(b) FROM table GROUP BY a"},
         )