Merging upstream version 20.1.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent d4fe7bdb16
commit 90988d8258
127 changed files with 73384 additions and 73067 deletions
tests/dialects/test_duckdb.py
@@ -1,4 +1,5 @@
 from sqlglot import ErrorLevel, UnsupportedError, exp, parse_one, transpile
+from sqlglot.helper import logger as helper_logger
 from tests.dialects.test_dialect import Validator
 
 
@@ -71,7 +72,7 @@ class TestDuckDB(Validator):
             "SELECT UNNEST(ARRAY[1, 2, 3]), UNNEST(ARRAY[4, 5]), UNNEST(ARRAY[6])",
             write={
                 "bigquery": "SELECT IF(pos = pos_2, col, NULL) AS col, IF(pos = pos_3, col_2, NULL) AS col_2, IF(pos = pos_4, col_3, NULL) AS col_3 FROM UNNEST(GENERATE_ARRAY(0, GREATEST(ARRAY_LENGTH([1, 2, 3]), ARRAY_LENGTH([4, 5]), ARRAY_LENGTH([6])) - 1)) AS pos CROSS JOIN UNNEST([1, 2, 3]) AS col WITH OFFSET AS pos_2 CROSS JOIN UNNEST([4, 5]) AS col_2 WITH OFFSET AS pos_3 CROSS JOIN UNNEST([6]) AS col_3 WITH OFFSET AS pos_4 WHERE ((pos = pos_2 OR (pos > (ARRAY_LENGTH([1, 2, 3]) - 1) AND pos_2 = (ARRAY_LENGTH([1, 2, 3]) - 1))) AND (pos = pos_3 OR (pos > (ARRAY_LENGTH([4, 5]) - 1) AND pos_3 = (ARRAY_LENGTH([4, 5]) - 1)))) AND (pos = pos_4 OR (pos > (ARRAY_LENGTH([6]) - 1) AND pos_4 = (ARRAY_LENGTH([6]) - 1)))",
-                "presto": "SELECT IF(pos = pos_2, col) AS col, IF(pos = pos_3, col_2) AS col_2, IF(pos = pos_4, col_3) AS col_3 FROM UNNEST(SEQUENCE(1, GREATEST(CARDINALITY(ARRAY[1, 2, 3]), CARDINALITY(ARRAY[4, 5]), CARDINALITY(ARRAY[6])))) AS _u(pos) CROSS JOIN UNNEST(ARRAY[1, 2, 3]) WITH ORDINALITY AS _u_2(col, pos_2) CROSS JOIN UNNEST(ARRAY[4, 5]) WITH ORDINALITY AS _u_3(col_2, pos_3) CROSS JOIN UNNEST(ARRAY[6]) WITH ORDINALITY AS _u_4(col_3, pos_4) WHERE ((pos = pos_2 OR (pos > CARDINALITY(ARRAY[1, 2, 3]) AND pos_2 = CARDINALITY(ARRAY[1, 2, 3]))) AND (pos = pos_3 OR (pos > CARDINALITY(ARRAY[4, 5]) AND pos_3 = CARDINALITY(ARRAY[4, 5])))) AND (pos = pos_4 OR (pos > CARDINALITY(ARRAY[6]) AND pos_4 = CARDINALITY(ARRAY[6])))",
+                "presto": "SELECT IF(_u.pos = _u_2.pos_2, _u_2.col) AS col, IF(_u.pos = _u_3.pos_3, _u_3.col_2) AS col_2, IF(_u.pos = _u_4.pos_4, _u_4.col_3) AS col_3 FROM UNNEST(SEQUENCE(1, GREATEST(CARDINALITY(ARRAY[1, 2, 3]), CARDINALITY(ARRAY[4, 5]), CARDINALITY(ARRAY[6])))) AS _u(pos) CROSS JOIN UNNEST(ARRAY[1, 2, 3]) WITH ORDINALITY AS _u_2(col, pos_2) CROSS JOIN UNNEST(ARRAY[4, 5]) WITH ORDINALITY AS _u_3(col_2, pos_3) CROSS JOIN UNNEST(ARRAY[6]) WITH ORDINALITY AS _u_4(col_3, pos_4) WHERE ((_u.pos = _u_2.pos_2 OR (_u.pos > CARDINALITY(ARRAY[1, 2, 3]) AND _u_2.pos_2 = CARDINALITY(ARRAY[1, 2, 3]))) AND (_u.pos = _u_3.pos_3 OR (_u.pos > CARDINALITY(ARRAY[4, 5]) AND _u_3.pos_3 = CARDINALITY(ARRAY[4, 5])))) AND (_u.pos = _u_4.pos_4 OR (_u.pos > CARDINALITY(ARRAY[6]) AND _u_4.pos_4 = CARDINALITY(ARRAY[6])))",
             },
         )
@@ -79,7 +80,7 @@ class TestDuckDB(Validator):
             "SELECT UNNEST(ARRAY[1, 2, 3]), UNNEST(ARRAY[4, 5]), UNNEST(ARRAY[6]) FROM x",
             write={
                 "bigquery": "SELECT IF(pos = pos_2, col, NULL) AS col, IF(pos = pos_3, col_2, NULL) AS col_2, IF(pos = pos_4, col_3, NULL) AS col_3 FROM x, UNNEST(GENERATE_ARRAY(0, GREATEST(ARRAY_LENGTH([1, 2, 3]), ARRAY_LENGTH([4, 5]), ARRAY_LENGTH([6])) - 1)) AS pos CROSS JOIN UNNEST([1, 2, 3]) AS col WITH OFFSET AS pos_2 CROSS JOIN UNNEST([4, 5]) AS col_2 WITH OFFSET AS pos_3 CROSS JOIN UNNEST([6]) AS col_3 WITH OFFSET AS pos_4 WHERE ((pos = pos_2 OR (pos > (ARRAY_LENGTH([1, 2, 3]) - 1) AND pos_2 = (ARRAY_LENGTH([1, 2, 3]) - 1))) AND (pos = pos_3 OR (pos > (ARRAY_LENGTH([4, 5]) - 1) AND pos_3 = (ARRAY_LENGTH([4, 5]) - 1)))) AND (pos = pos_4 OR (pos > (ARRAY_LENGTH([6]) - 1) AND pos_4 = (ARRAY_LENGTH([6]) - 1)))",
-                "presto": "SELECT IF(pos = pos_2, col) AS col, IF(pos = pos_3, col_2) AS col_2, IF(pos = pos_4, col_3) AS col_3 FROM x, UNNEST(SEQUENCE(1, GREATEST(CARDINALITY(ARRAY[1, 2, 3]), CARDINALITY(ARRAY[4, 5]), CARDINALITY(ARRAY[6])))) AS _u(pos) CROSS JOIN UNNEST(ARRAY[1, 2, 3]) WITH ORDINALITY AS _u_2(col, pos_2) CROSS JOIN UNNEST(ARRAY[4, 5]) WITH ORDINALITY AS _u_3(col_2, pos_3) CROSS JOIN UNNEST(ARRAY[6]) WITH ORDINALITY AS _u_4(col_3, pos_4) WHERE ((pos = pos_2 OR (pos > CARDINALITY(ARRAY[1, 2, 3]) AND pos_2 = CARDINALITY(ARRAY[1, 2, 3]))) AND (pos = pos_3 OR (pos > CARDINALITY(ARRAY[4, 5]) AND pos_3 = CARDINALITY(ARRAY[4, 5])))) AND (pos = pos_4 OR (pos > CARDINALITY(ARRAY[6]) AND pos_4 = CARDINALITY(ARRAY[6])))",
+                "presto": "SELECT IF(_u.pos = _u_2.pos_2, _u_2.col) AS col, IF(_u.pos = _u_3.pos_3, _u_3.col_2) AS col_2, IF(_u.pos = _u_4.pos_4, _u_4.col_3) AS col_3 FROM x, UNNEST(SEQUENCE(1, GREATEST(CARDINALITY(ARRAY[1, 2, 3]), CARDINALITY(ARRAY[4, 5]), CARDINALITY(ARRAY[6])))) AS _u(pos) CROSS JOIN UNNEST(ARRAY[1, 2, 3]) WITH ORDINALITY AS _u_2(col, pos_2) CROSS JOIN UNNEST(ARRAY[4, 5]) WITH ORDINALITY AS _u_3(col_2, pos_3) CROSS JOIN UNNEST(ARRAY[6]) WITH ORDINALITY AS _u_4(col_3, pos_4) WHERE ((_u.pos = _u_2.pos_2 OR (_u.pos > CARDINALITY(ARRAY[1, 2, 3]) AND _u_2.pos_2 = CARDINALITY(ARRAY[1, 2, 3]))) AND (_u.pos = _u_3.pos_3 OR (_u.pos > CARDINALITY(ARRAY[4, 5]) AND _u_3.pos_3 = CARDINALITY(ARRAY[4, 5])))) AND (_u.pos = _u_4.pos_4 OR (_u.pos > CARDINALITY(ARRAY[6]) AND _u_4.pos_4 = CARDINALITY(ARRAY[6])))",
             },
         )
         self.validate_all(
@@ -96,7 +97,6 @@ class TestDuckDB(Validator):
         )
 
         self.validate_identity("SELECT i FROM RANGE(5) AS _(i) ORDER BY i ASC")
-        self.validate_identity("[x.STRING_SPLIT(' ')[1] FOR x IN ['1', '2', 3] IF x.CONTAINS('1')]")
         self.validate_identity("INSERT INTO x BY NAME SELECT 1 AS y")
         self.validate_identity("SELECT 1 AS x UNION ALL BY NAME SELECT 2 AS x")
         self.validate_identity("SELECT SUM(x) FILTER (x = 1)", "SELECT SUM(x) FILTER(WHERE x = 1)")
@@ -109,6 +109,10 @@ class TestDuckDB(Validator):
             parse_one("a // b", read="duckdb").assert_is(exp.IntDiv).sql(dialect="duckdb"), "a // b"
         )
 
+        self.validate_identity("SELECT EPOCH_MS(10) AS t")
+        self.validate_identity("SELECT MAKE_TIMESTAMP(10) AS t")
+        self.validate_identity("SELECT TO_TIMESTAMP(10) AS t")
         self.validate_identity("SELECT UNNEST(column, recursive := TRUE) FROM table")
         self.validate_identity("VAR_POP(a)")
+        self.validate_identity("SELECT * FROM foo ASOF LEFT JOIN bar ON a = b")
         self.validate_identity("PIVOT Cities ON Year USING SUM(Population)")
|
@ -151,11 +155,18 @@ class TestDuckDB(Validator):
|
|||
self.validate_all("0x1010", write={"": "0 AS x1010"})
|
||||
self.validate_all("x ~ y", write={"duckdb": "REGEXP_MATCHES(x, y)"})
|
||||
self.validate_all("SELECT * FROM 'x.y'", write={"duckdb": 'SELECT * FROM "x.y"'})
|
||||
self.validate_all(
|
||||
"SELECT * FROM produce PIVOT(SUM(sales) FOR quarter IN ('Q1', 'Q2'))",
|
||||
read={
|
||||
"duckdb": "SELECT * FROM produce PIVOT(SUM(sales) FOR quarter IN ('Q1', 'Q2'))",
|
||||
"snowflake": "SELECT * FROM produce PIVOT(SUM(produce.sales) FOR produce.quarter IN ('Q1', 'Q2'))",
|
||||
},
|
||||
)
|
||||
self.validate_all(
|
||||
"SELECT UNNEST([1, 2, 3])",
|
||||
write={
|
||||
"duckdb": "SELECT UNNEST([1, 2, 3])",
|
||||
"snowflake": "SELECT IFF(pos = pos_2, col, NULL) AS col FROM (SELECT value FROM TABLE(FLATTEN(INPUT => ARRAY_GENERATE_RANGE(0, (GREATEST(ARRAY_SIZE([1, 2, 3])) - 1) + 1)))) AS _u(pos) CROSS JOIN (SELECT value, index FROM TABLE(FLATTEN(INPUT => [1, 2, 3]))) AS _u_2(col, pos_2) WHERE pos = pos_2 OR (pos > (ARRAY_SIZE([1, 2, 3]) - 1) AND pos_2 = (ARRAY_SIZE([1, 2, 3]) - 1))",
|
||||
"snowflake": "SELECT IFF(_u.pos = _u_2.pos_2, _u_2.col, NULL) AS col FROM TABLE(FLATTEN(INPUT => ARRAY_GENERATE_RANGE(0, (GREATEST(ARRAY_SIZE([1, 2, 3])) - 1) + 1))) AS _u(seq, key, path, index, pos, this) CROSS JOIN TABLE(FLATTEN(INPUT => [1, 2, 3])) AS _u_2(seq, key, path, pos_2, col, this) WHERE _u.pos = _u_2.pos_2 OR (_u.pos > (ARRAY_SIZE([1, 2, 3]) - 1) AND _u_2.pos_2 = (ARRAY_SIZE([1, 2, 3]) - 1))",
|
||||
},
|
||||
)
|
||||
self.validate_all(
|
||||
|
@@ -355,14 +366,14 @@ class TestDuckDB(Validator):
             "STRUCT_PACK(x := 1, y := '2')",
             write={
                 "duckdb": "{'x': 1, 'y': '2'}",
-                "spark": "STRUCT(x = 1, y = '2')",
+                "spark": "STRUCT(1 AS x, '2' AS y)",
             },
         )
         self.validate_all(
             "STRUCT_PACK(key1 := 'value1', key2 := 42)",
             write={
                 "duckdb": "{'key1': 'value1', 'key2': 42}",
-                "spark": "STRUCT(key1 = 'value1', key2 = 42)",
+                "spark": "STRUCT('value1' AS key1, 42 AS key2)",
             },
         )
         self.validate_all(
@@ -440,6 +451,16 @@ class TestDuckDB(Validator):
                 "hive": "SELECT DATE_ADD(TO_DATE(x), 1)",
             },
         )
+        self.validate_all(
+            "SELECT CAST('2018-01-01 00:00:00' AS DATE) + INTERVAL 3 DAY",
+            read={
+                "hive": "SELECT DATE_ADD('2018-01-01 00:00:00', 3)",
+            },
+            write={
+                "duckdb": "SELECT CAST('2018-01-01 00:00:00' AS DATE) + INTERVAL '3' DAY",
+                "hive": "SELECT CAST('2018-01-01 00:00:00' AS DATE) + INTERVAL '3' DAY",
+            },
+        )
         self.validate_all(
             "SELECT CAST('2020-05-06' AS DATE) - INTERVAL 5 DAY",
             read={"bigquery": "SELECT DATE_SUB(CAST('2020-05-06' AS DATE), INTERVAL 5 DAY)"},
@@ -483,6 +504,35 @@ class TestDuckDB(Validator):
 
         self.validate_identity("SELECT ISNAN(x)")
 
+    def test_array_index(self):
+        with self.assertLogs(helper_logger) as cm:
+            self.validate_all(
+                "SELECT some_arr[1] AS first FROM blah",
+                read={
+                    "bigquery": "SELECT some_arr[0] AS first FROM blah",
+                },
+                write={
+                    "bigquery": "SELECT some_arr[0] AS first FROM blah",
+                    "duckdb": "SELECT some_arr[1] AS first FROM blah",
+                    "presto": "SELECT some_arr[1] AS first FROM blah",
+                },
+            )
+            self.validate_identity(
+                "[x.STRING_SPLIT(' ')[1] FOR x IN ['1', '2', 3] IF x.CONTAINS('1')]"
+            )
+
+            self.assertEqual(
+                cm.output,
+                [
+                    "WARNING:sqlglot:Applying array index offset (-1)",
+                    "WARNING:sqlglot:Applying array index offset (1)",
+                    "WARNING:sqlglot:Applying array index offset (1)",
+                    "WARNING:sqlglot:Applying array index offset (1)",
+                    "WARNING:sqlglot:Applying array index offset (-1)",
+                    "WARNING:sqlglot:Applying array index offset (1)",
+                ],
+            )
+
     def test_time(self):
         self.validate_identity("SELECT CURRENT_DATE")
         self.validate_identity("SELECT CURRENT_TIMESTAMP")
|
@ -533,16 +583,16 @@ class TestDuckDB(Validator):
|
|||
self.validate_all(
|
||||
"EPOCH_MS(x)",
|
||||
write={
|
||||
"bigquery": "UNIX_TO_TIME(x / 1000)",
|
||||
"duckdb": "TO_TIMESTAMP(x / 1000)",
|
||||
"presto": "FROM_UNIXTIME(x / 1000)",
|
||||
"spark": "CAST(FROM_UNIXTIME(x / 1000) AS TIMESTAMP)",
|
||||
"bigquery": "TIMESTAMP_MILLIS(x)",
|
||||
"duckdb": "EPOCH_MS(x)",
|
||||
"presto": "FROM_UNIXTIME(CAST(x AS DOUBLE) / 1000)",
|
||||
"spark": "TIMESTAMP_MILLIS(x)",
|
||||
},
|
||||
)
|
||||
self.validate_all(
|
||||
"STRFTIME(x, '%y-%-m-%S')",
|
||||
write={
|
||||
"bigquery": "TIME_TO_STR(x, '%y-%-m-%S')",
|
||||
"bigquery": "FORMAT_DATE('%y-%-m-%S', x)",
|
||||
"duckdb": "STRFTIME(x, '%y-%-m-%S')",
|
||||
"postgres": "TO_CHAR(x, 'YY-FMMM-SS')",
|
||||
"presto": "DATE_FORMAT(x, '%y-%c-%s')",
|
||||
|
@@ -552,6 +602,7 @@ class TestDuckDB(Validator):
         self.validate_all(
             "STRFTIME(x, '%Y-%m-%d %H:%M:%S')",
             write={
+                "bigquery": "FORMAT_DATE('%Y-%m-%d %H:%M:%S', x)",
                 "duckdb": "STRFTIME(x, '%Y-%m-%d %H:%M:%S')",
                 "presto": "DATE_FORMAT(x, '%Y-%m-%d %T')",
                 "hive": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss')",
|
@ -570,7 +621,7 @@ class TestDuckDB(Validator):
|
|||
self.validate_all(
|
||||
"TO_TIMESTAMP(x)",
|
||||
write={
|
||||
"bigquery": "UNIX_TO_TIME(x)",
|
||||
"bigquery": "TIMESTAMP_SECONDS(x)",
|
||||
"duckdb": "TO_TIMESTAMP(x)",
|
||||
"presto": "FROM_UNIXTIME(x)",
|
||||
"hive": "FROM_UNIXTIME(x)",
|
||||
|
@ -651,22 +702,25 @@ class TestDuckDB(Validator):
|
|||
"CAST(ROW(1, ROW(1)) AS STRUCT(number BIGINT, row STRUCT(number BIGINT)))"
|
||||
)
|
||||
|
||||
self.validate_all("CAST(x AS NUMERIC(1, 2))", write={"duckdb": "CAST(x AS DECIMAL(1, 2))"})
|
||||
self.validate_all("CAST(x AS HUGEINT)", write={"duckdb": "CAST(x AS INT128)"})
|
||||
self.validate_all("CAST(x AS CHAR)", write={"duckdb": "CAST(x AS TEXT)"})
|
||||
self.validate_all("CAST(x AS BPCHAR)", write={"duckdb": "CAST(x AS TEXT)"})
|
||||
self.validate_all("CAST(x AS STRING)", write={"duckdb": "CAST(x AS TEXT)"})
|
||||
self.validate_all("CAST(x AS INT1)", write={"duckdb": "CAST(x AS TINYINT)"})
|
||||
self.validate_all("CAST(x AS FLOAT4)", write={"duckdb": "CAST(x AS REAL)"})
|
||||
self.validate_all("CAST(x AS FLOAT)", write={"duckdb": "CAST(x AS REAL)"})
|
||||
self.validate_all("CAST(x AS INT4)", write={"duckdb": "CAST(x AS INT)"})
|
||||
self.validate_all("CAST(x AS INTEGER)", write={"duckdb": "CAST(x AS INT)"})
|
||||
self.validate_all("CAST(x AS SIGNED)", write={"duckdb": "CAST(x AS INT)"})
|
||||
self.validate_all("CAST(x AS BLOB)", write={"duckdb": "CAST(x AS BLOB)"})
|
||||
self.validate_all("CAST(x AS BYTEA)", write={"duckdb": "CAST(x AS BLOB)"})
|
||||
self.validate_all("CAST(x AS BINARY)", write={"duckdb": "CAST(x AS BLOB)"})
|
||||
self.validate_all("CAST(x AS VARBINARY)", write={"duckdb": "CAST(x AS BLOB)"})
|
||||
self.validate_all("CAST(x AS LOGICAL)", write={"duckdb": "CAST(x AS BOOLEAN)"})
|
||||
self.validate_identity("CAST(x AS INT64)", "CAST(x AS BIGINT)")
|
||||
self.validate_identity("CAST(x AS INT32)", "CAST(x AS INT)")
|
||||
self.validate_identity("CAST(x AS INT16)", "CAST(x AS SMALLINT)")
|
||||
self.validate_identity("CAST(x AS NUMERIC(1, 2))", "CAST(x AS DECIMAL(1, 2))")
|
||||
self.validate_identity("CAST(x AS HUGEINT)", "CAST(x AS INT128)")
|
||||
self.validate_identity("CAST(x AS CHAR)", "CAST(x AS TEXT)")
|
||||
self.validate_identity("CAST(x AS BPCHAR)", "CAST(x AS TEXT)")
|
||||
self.validate_identity("CAST(x AS STRING)", "CAST(x AS TEXT)")
|
||||
self.validate_identity("CAST(x AS INT1)", "CAST(x AS TINYINT)")
|
||||
self.validate_identity("CAST(x AS FLOAT4)", "CAST(x AS REAL)")
|
||||
self.validate_identity("CAST(x AS FLOAT)", "CAST(x AS REAL)")
|
||||
self.validate_identity("CAST(x AS INT4)", "CAST(x AS INT)")
|
||||
self.validate_identity("CAST(x AS INTEGER)", "CAST(x AS INT)")
|
||||
self.validate_identity("CAST(x AS SIGNED)", "CAST(x AS INT)")
|
||||
self.validate_identity("CAST(x AS BLOB)", "CAST(x AS BLOB)")
|
||||
self.validate_identity("CAST(x AS BYTEA)", "CAST(x AS BLOB)")
|
||||
self.validate_identity("CAST(x AS BINARY)", "CAST(x AS BLOB)")
|
||||
self.validate_identity("CAST(x AS VARBINARY)", "CAST(x AS BLOB)")
|
||||
self.validate_identity("CAST(x AS LOGICAL)", "CAST(x AS BOOLEAN)")
|
||||
self.validate_all(
|
||||
"CAST(x AS NUMERIC)",
|
||||
write={
|
||||
|
@ -799,3 +853,17 @@ class TestDuckDB(Validator):
|
|||
"duckdb": "SELECT CAST(w AS TIMESTAMP_S), CAST(x AS TIMESTAMP_MS), CAST(y AS TIMESTAMP), CAST(z AS TIMESTAMP_NS)",
|
||||
},
|
||||
)
|
||||
|
||||
def test_isnan(self):
|
||||
self.validate_all(
|
||||
"ISNAN(x)",
|
||||
read={"bigquery": "IS_NAN(x)"},
|
||||
write={"bigquery": "IS_NAN(x)", "duckdb": "ISNAN(x)"},
|
||||
)
|
||||
|
||||
def test_isinf(self):
|
||||
self.validate_all(
|
||||
"ISINF(x)",
|
||||
read={"bigquery": "IS_INF(x)"},
|
||||
write={"bigquery": "IS_INF(x)", "duckdb": "ISINF(x)"},
|
||||
)
|
||||
|