Merging upstream version 10.6.3.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Author: Daniel Baumann
Date: 2025-02-13 15:09:58 +01:00
Parent: d03a55eda6
Commit: ece6881255
Signed by: daniel (GPG key ID: FBB4F0E80A80222F)
48 changed files with 906 additions and 266 deletions

@@ -170,7 +170,7 @@ class TestBigQuery(Validator):
"bigquery": "CURRENT_TIMESTAMP()",
"duckdb": "CURRENT_TIMESTAMP()",
"postgres": "CURRENT_TIMESTAMP",
"presto": "CURRENT_TIMESTAMP()",
"presto": "CURRENT_TIMESTAMP",
"hive": "CURRENT_TIMESTAMP()",
"spark": "CURRENT_TIMESTAMP()",
},
@@ -181,7 +181,7 @@ class TestBigQuery(Validator):
"bigquery": "CURRENT_TIMESTAMP()",
"duckdb": "CURRENT_TIMESTAMP()",
"postgres": "CURRENT_TIMESTAMP",
"presto": "CURRENT_TIMESTAMP()",
"presto": "CURRENT_TIMESTAMP",
"hive": "CURRENT_TIMESTAMP()",
"spark": "CURRENT_TIMESTAMP()",
},
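Presto's CURRENT_TIMESTAMP is a niladic expression, so the generator now omits the parentheses. A quick sketch of how one might check this mapping with sqlglot's transpile API (a sketch, assuming sqlglot >= 10.6.3 is installed):

import sqlglot

# Presto drops the parens; Hive keeps them.
assert sqlglot.transpile("CURRENT_TIMESTAMP()", read="bigquery", write="presto")[0] == "CURRENT_TIMESTAMP"
assert sqlglot.transpile("CURRENT_TIMESTAMP()", read="bigquery", write="hive")[0] == "CURRENT_TIMESTAMP()"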

@@ -1,6 +1,7 @@
import unittest
from sqlglot import Dialect, Dialects, ErrorLevel, UnsupportedError, parse_one
from sqlglot.dialects import Hive
class Validator(unittest.TestCase):
@@ -67,6 +68,11 @@ class TestDialect(Validator):
self.assertIsNotNone(Dialect.get_or_raise(dialect))
self.assertIsNotNone(Dialect[dialect.value])
def test_get_or_raise(self):
self.assertEqual(Dialect.get_or_raise(Hive), Hive)
self.assertEqual(Dialect.get_or_raise(Hive()), Hive)
self.assertEqual(Dialect.get_or_raise("hive"), Hive)
def test_cast(self):
self.validate_all(
"CAST(a AS TEXT)",
@@ -280,6 +286,21 @@ class TestDialect(Validator):
write={"oracle": "CAST(a AS NUMBER)"},
)
def test_if_null(self):
self.validate_all(
"SELECT IFNULL(1, NULL) FROM foo",
write={
"": "SELECT COALESCE(1, NULL) FROM foo",
"redshift": "SELECT COALESCE(1, NULL) FROM foo",
"postgres": "SELECT COALESCE(1, NULL) FROM foo",
"mysql": "SELECT COALESCE(1, NULL) FROM foo",
"duckdb": "SELECT COALESCE(1, NULL) FROM foo",
"spark": "SELECT COALESCE(1, NULL) FROM foo",
"bigquery": "SELECT COALESCE(1, NULL) FROM foo",
"presto": "SELECT COALESCE(1, NULL) FROM foo",
},
)
def test_time(self):
self.validate_all(
"STR_TO_TIME(x, '%Y-%m-%dT%H:%M:%S')",
@@ -287,10 +308,10 @@ class TestDialect(Validator):
"duckdb": "STRPTIME(x, '%Y-%m-%dT%H:%M:%S')",
},
write={
"mysql": "STR_TO_DATE(x, '%Y-%m-%dT%H:%i:%S')",
"mysql": "STR_TO_DATE(x, '%Y-%m-%dT%T')",
"duckdb": "STRPTIME(x, '%Y-%m-%dT%H:%M:%S')",
"hive": "CAST(FROM_UNIXTIME(UNIX_TIMESTAMP(x, 'yyyy-MM-ddTHH:mm:ss')) AS TIMESTAMP)",
"presto": "DATE_PARSE(x, '%Y-%m-%dT%H:%i:%S')",
"presto": "DATE_PARSE(x, '%Y-%m-%dT%T')",
"drill": "TO_TIMESTAMP(x, 'yyyy-MM-dd''T''HH:mm:ss')",
"redshift": "TO_TIMESTAMP(x, 'YYYY-MM-DDTHH:MI:SS')",
"spark": "TO_TIMESTAMP(x, 'yyyy-MM-ddTHH:mm:ss')",
@@ -356,7 +377,7 @@ class TestDialect(Validator):
write={
"duckdb": "EPOCH(CAST('2020-01-01' AS TIMESTAMP))",
"hive": "UNIX_TIMESTAMP('2020-01-01')",
"presto": "TO_UNIXTIME(DATE_PARSE('2020-01-01', '%Y-%m-%d %H:%i:%S'))",
"presto": "TO_UNIXTIME(DATE_PARSE('2020-01-01', '%Y-%m-%d %T'))",
},
)
self.validate_all(
@@ -418,7 +439,7 @@ class TestDialect(Validator):
self.validate_all(
"UNIX_TO_STR(x, y)",
write={
"duckdb": "STRFTIME(TO_TIMESTAMP(CAST(x AS BIGINT)), y)",
"duckdb": "STRFTIME(TO_TIMESTAMP(x), y)",
"hive": "FROM_UNIXTIME(x, y)",
"presto": "DATE_FORMAT(FROM_UNIXTIME(x), y)",
"starrocks": "FROM_UNIXTIME(x, y)",
@@ -427,7 +448,7 @@ class TestDialect(Validator):
self.validate_all(
"UNIX_TO_TIME(x)",
write={
"duckdb": "TO_TIMESTAMP(CAST(x AS BIGINT))",
"duckdb": "TO_TIMESTAMP(x)",
"hive": "FROM_UNIXTIME(x)",
"oracle": "TO_DATE('1970-01-01','YYYY-MM-DD') + (x / 86400)",
"postgres": "TO_TIMESTAMP(x)",
@@ -438,7 +459,7 @@ class TestDialect(Validator):
self.validate_all(
"UNIX_TO_TIME_STR(x)",
write={
"duckdb": "CAST(TO_TIMESTAMP(CAST(x AS BIGINT)) AS TEXT)",
"duckdb": "CAST(TO_TIMESTAMP(x) AS TEXT)",
"hive": "FROM_UNIXTIME(x)",
"presto": "CAST(FROM_UNIXTIME(x) AS VARCHAR)",
},
@@ -575,10 +596,10 @@ class TestDialect(Validator):
},
write={
"drill": "TO_DATE(x, 'yyyy-MM-dd''T''HH:mm:ss')",
"mysql": "STR_TO_DATE(x, '%Y-%m-%dT%H:%i:%S')",
"starrocks": "STR_TO_DATE(x, '%Y-%m-%dT%H:%i:%S')",
"mysql": "STR_TO_DATE(x, '%Y-%m-%dT%T')",
"starrocks": "STR_TO_DATE(x, '%Y-%m-%dT%T')",
"hive": "CAST(FROM_UNIXTIME(UNIX_TIMESTAMP(x, 'yyyy-MM-ddTHH:mm:ss')) AS DATE)",
"presto": "CAST(DATE_PARSE(x, '%Y-%m-%dT%H:%i:%S') AS DATE)",
"presto": "CAST(DATE_PARSE(x, '%Y-%m-%dT%T') AS DATE)",
"spark": "TO_DATE(x, 'yyyy-MM-ddTHH:mm:ss')",
},
)
@@ -709,6 +730,7 @@ class TestDialect(Validator):
"hive": "REDUCE(x, 0, (acc, x) -> acc + x, acc -> acc)",
"presto": "REDUCE(x, 0, (acc, x) -> acc + x, acc -> acc)",
"spark": "AGGREGATE(x, 0, (acc, x) -> acc + x, acc -> acc)",
"presto": "REDUCE(x, 0, (acc, x) -> acc + x, acc -> acc)",
},
)
@@ -1381,3 +1403,21 @@ SELECT
"spark": "MERGE INTO a AS b USING c AS d ON b.id = d.id WHEN MATCHED AND EXISTS(SELECT b.name EXCEPT SELECT d.name) THEN UPDATE SET b.name = d.name",
},
)
def test_substring(self):
self.validate_all(
"SUBSTR('123456', 2, 3)",
write={
"bigquery": "SUBSTR('123456', 2, 3)",
"oracle": "SUBSTR('123456', 2, 3)",
"postgres": "SUBSTR('123456', 2, 3)",
},
)
self.validate_all(
"SUBSTRING('123456', 2, 3)",
write={
"bigquery": "SUBSTRING('123456', 2, 3)",
"oracle": "SUBSTR('123456', 2, 3)",
"postgres": "SUBSTRING('123456' FROM 2 FOR 3)",
},
)
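The new test_get_or_raise and test_if_null cases pin down two behaviours: Dialect.get_or_raise accepts a Dialect subclass, an instance, or a name string, and IFNULL is normalized to the portable COALESCE. A minimal sketch mirroring the assertions above:

import sqlglot
from sqlglot import Dialect
from sqlglot.dialects import Hive

# A class, an instance, and a name string all resolve to the same dialect.
assert Dialect.get_or_raise(Hive) == Hive
assert Dialect.get_or_raise(Hive()) == Hive
assert Dialect.get_or_raise("hive") == Hive

# IFNULL is rewritten to COALESCE on output (default dialect shown).
assert sqlglot.transpile("SELECT IFNULL(1, NULL) FROM foo")[0] == "SELECT COALESCE(1, NULL) FROM foo"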

@@ -22,7 +22,7 @@ class TestDuckDB(Validator):
"EPOCH_MS(x)",
write={
"bigquery": "UNIX_TO_TIME(x / 1000)",
"duckdb": "TO_TIMESTAMP(CAST(x / 1000 AS BIGINT))",
"duckdb": "TO_TIMESTAMP(x / 1000)",
"presto": "FROM_UNIXTIME(x / 1000)",
"spark": "FROM_UNIXTIME(x / 1000)",
},
@@ -41,7 +41,7 @@ class TestDuckDB(Validator):
"STRFTIME(x, '%Y-%m-%d %H:%M:%S')",
write={
"duckdb": "STRFTIME(x, '%Y-%m-%d %H:%M:%S')",
"presto": "DATE_FORMAT(x, '%Y-%m-%d %H:%i:%S')",
"presto": "DATE_FORMAT(x, '%Y-%m-%d %T')",
"hive": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss')",
},
)
@@ -58,9 +58,10 @@ class TestDuckDB(Validator):
self.validate_all(
"TO_TIMESTAMP(x)",
write={
"duckdb": "CAST(x AS TIMESTAMP)",
"presto": "CAST(x AS TIMESTAMP)",
"hive": "CAST(x AS TIMESTAMP)",
"bigquery": "UNIX_TO_TIME(x)",
"duckdb": "TO_TIMESTAMP(x)",
"presto": "FROM_UNIXTIME(x)",
"hive": "FROM_UNIXTIME(x)",
},
)
self.validate_all(
@@ -334,6 +335,14 @@ class TestDuckDB(Validator):
},
)
self.validate_all(
"cast([[1]] as int[][])",
write={
"duckdb": "CAST(LIST_VALUE(LIST_VALUE(1)) AS INT[][])",
"spark": "CAST(ARRAY(ARRAY(1)) AS ARRAY<ARRAY<INT>>)",
},
)
def test_bool_or(self):
self.validate_all(
"SELECT a, LOGICAL_OR(b) FROM table GROUP BY a",

@@ -259,7 +259,7 @@ class TestHive(Validator):
self.validate_all(
"""from_unixtime(x, "yyyy-MM-dd'T'HH")""",
write={
"duckdb": "STRFTIME(TO_TIMESTAMP(CAST(x AS BIGINT)), '%Y-%m-%d''T''%H')",
"duckdb": "STRFTIME(TO_TIMESTAMP(x), '%Y-%m-%d''T''%H')",
"presto": "DATE_FORMAT(FROM_UNIXTIME(x), '%Y-%m-%d''T''%H')",
"hive": "FROM_UNIXTIME(x, 'yyyy-MM-dd\\'T\\'HH')",
"spark": "FROM_UNIXTIME(x, 'yyyy-MM-dd\\'T\\'HH')",
@@ -269,7 +269,7 @@ class TestHive(Validator):
"DATE_FORMAT('2020-01-01', 'yyyy-MM-dd HH:mm:ss')",
write={
"duckdb": "STRFTIME(CAST('2020-01-01' AS TIMESTAMP), '%Y-%m-%d %H:%M:%S')",
"presto": "DATE_FORMAT(CAST('2020-01-01' AS TIMESTAMP), '%Y-%m-%d %H:%i:%S')",
"presto": "DATE_FORMAT(CAST('2020-01-01' AS TIMESTAMP), '%Y-%m-%d %T')",
"hive": "DATE_FORMAT(CAST('2020-01-01' AS TIMESTAMP), 'yyyy-MM-dd HH:mm:ss')",
"spark": "DATE_FORMAT(CAST('2020-01-01' AS TIMESTAMP), 'yyyy-MM-dd HH:mm:ss')",
},
@@ -308,7 +308,7 @@ class TestHive(Validator):
"UNIX_TIMESTAMP(x)",
write={
"duckdb": "EPOCH(STRPTIME(x, '%Y-%m-%d %H:%M:%S'))",
"presto": "TO_UNIXTIME(DATE_PARSE(x, '%Y-%m-%d %H:%i:%S'))",
"presto": "TO_UNIXTIME(DATE_PARSE(x, '%Y-%m-%d %T'))",
"hive": "UNIX_TIMESTAMP(x)",
"spark": "UNIX_TIMESTAMP(x)",
"": "STR_TO_UNIX(x, '%Y-%m-%d %H:%M:%S')",

@@ -195,6 +195,26 @@ class TestMySQL(Validator):
)
def test_mysql(self):
self.validate_all(
"SELECT a FROM tbl FOR UPDATE",
write={
"": "SELECT a FROM tbl",
"mysql": "SELECT a FROM tbl FOR UPDATE",
"oracle": "SELECT a FROM tbl FOR UPDATE",
"postgres": "SELECT a FROM tbl FOR UPDATE",
"tsql": "SELECT a FROM tbl FOR UPDATE",
},
)
self.validate_all(
"SELECT a FROM tbl FOR SHARE",
write={
"": "SELECT a FROM tbl",
"mysql": "SELECT a FROM tbl FOR SHARE",
"oracle": "SELECT a FROM tbl FOR SHARE",
"postgres": "SELECT a FROM tbl FOR SHARE",
"tsql": "SELECT a FROM tbl FOR SHARE",
},
)
self.validate_all(
"GROUP_CONCAT(DISTINCT x ORDER BY y DESC)",
write={
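The new FOR UPDATE / FOR SHARE tests above show the row-locking clause being kept for dialects that support it and silently dropped otherwise. A sketch mirroring those expected outputs:

import sqlglot

locked = sqlglot.parse_one("SELECT a FROM tbl FOR UPDATE", read="mysql")
assert locked.sql(dialect="postgres") == "SELECT a FROM tbl FOR UPDATE"  # kept
assert locked.sql() == "SELECT a FROM tbl"  # default dialect drops the lock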

@@ -112,6 +112,22 @@ class TestPostgres(Validator):
self.validate_identity("x ~ 'y'")
self.validate_identity("x ~* 'y'")
self.validate_all(
"GENERATE_SERIES(a, b, ' 2 days ')",
write={
"postgres": "GENERATE_SERIES(a, b, INTERVAL '2' days)",
"presto": "SEQUENCE(a, b, INTERVAL '2' days)",
"trino": "SEQUENCE(a, b, INTERVAL '2' days)",
},
)
self.validate_all(
"GENERATE_SERIES('2019-01-01'::TIMESTAMP, NOW(), '1day')",
write={
"postgres": "GENERATE_SERIES(CAST('2019-01-01' AS TIMESTAMP), CURRENT_TIMESTAMP, INTERVAL '1' day)",
"presto": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' day)",
"trino": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' day)",
},
)
self.validate_all(
"END WORK AND NO CHAIN",
write={"postgres": "COMMIT AND NO CHAIN"},
@@ -249,7 +265,7 @@ class TestPostgres(Validator):
)
self.validate_all(
"'[1,2,3]'::json->2",
write={"postgres": "CAST('[1,2,3]' AS JSON) -> '2'"},
write={"postgres": "CAST('[1,2,3]' AS JSON) -> 2"},
)
self.validate_all(
"""'{"a":1,"b":2}'::json->'b'""",
@@ -265,7 +281,7 @@ class TestPostgres(Validator):
)
self.validate_all(
"""'[1,2,3]'::json->>2""",
write={"postgres": "CAST('[1,2,3]' AS JSON) ->> '2'"},
write={"postgres": "CAST('[1,2,3]' AS JSON) ->> 2"},
)
self.validate_all(
"""'{"a":1,"b":2}'::json->>'b'""",

@@ -111,7 +111,7 @@ class TestPresto(Validator):
"DATE_FORMAT(x, '%Y-%m-%d %H:%i:%S')",
write={
"duckdb": "STRFTIME(x, '%Y-%m-%d %H:%M:%S')",
"presto": "DATE_FORMAT(x, '%Y-%m-%d %H:%i:%S')",
"presto": "DATE_FORMAT(x, '%Y-%m-%d %T')",
"hive": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss')",
"spark": "DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss')",
},
@@ -120,7 +120,7 @@ class TestPresto(Validator):
"DATE_PARSE(x, '%Y-%m-%d %H:%i:%S')",
write={
"duckdb": "STRPTIME(x, '%Y-%m-%d %H:%M:%S')",
"presto": "DATE_PARSE(x, '%Y-%m-%d %H:%i:%S')",
"presto": "DATE_PARSE(x, '%Y-%m-%d %T')",
"hive": "CAST(x AS TIMESTAMP)",
"spark": "TO_TIMESTAMP(x, 'yyyy-MM-dd HH:mm:ss')",
},
@@ -134,6 +134,12 @@ class TestPresto(Validator):
"spark": "TO_TIMESTAMP(x, 'yyyy-MM-dd')",
},
)
self.validate_all(
"DATE_FORMAT(x, '%T')",
write={
"hive": "DATE_FORMAT(x, 'HH:mm:ss')",
},
)
self.validate_all(
"DATE_PARSE(SUBSTR(x, 1, 10), '%Y-%m-%d')",
write={
@@ -146,7 +152,7 @@ class TestPresto(Validator):
self.validate_all(
"FROM_UNIXTIME(x)",
write={
"duckdb": "TO_TIMESTAMP(CAST(x AS BIGINT))",
"duckdb": "TO_TIMESTAMP(x)",
"presto": "FROM_UNIXTIME(x)",
"hive": "FROM_UNIXTIME(x)",
"spark": "FROM_UNIXTIME(x)",
@@ -177,11 +183,51 @@ class TestPresto(Validator):
self.validate_all(
"NOW()",
write={
"presto": "CURRENT_TIMESTAMP()",
"presto": "CURRENT_TIMESTAMP",
"hive": "CURRENT_TIMESTAMP()",
},
)
self.validate_all(
"DAY_OF_WEEK(timestamp '2012-08-08 01:00')",
write={
"spark": "DAYOFWEEK(CAST('2012-08-08 01:00' AS TIMESTAMP))",
"presto": "DAY_OF_WEEK(CAST('2012-08-08 01:00' AS TIMESTAMP))",
},
)
self.validate_all(
"DAY_OF_MONTH(timestamp '2012-08-08 01:00')",
write={
"spark": "DAYOFMONTH(CAST('2012-08-08 01:00' AS TIMESTAMP))",
"presto": "DAY_OF_MONTH(CAST('2012-08-08 01:00' AS TIMESTAMP))",
},
)
self.validate_all(
"DAY_OF_YEAR(timestamp '2012-08-08 01:00')",
write={
"spark": "DAYOFYEAR(CAST('2012-08-08 01:00' AS TIMESTAMP))",
"presto": "DAY_OF_YEAR(CAST('2012-08-08 01:00' AS TIMESTAMP))",
},
)
self.validate_all(
"WEEK_OF_YEAR(timestamp '2012-08-08 01:00')",
write={
"spark": "WEEKOFYEAR(CAST('2012-08-08 01:00' AS TIMESTAMP))",
"presto": "WEEK_OF_YEAR(CAST('2012-08-08 01:00' AS TIMESTAMP))",
},
)
self.validate_all(
"SELECT timestamp '2012-10-31 00:00' AT TIME ZONE 'America/Sao_Paulo'",
write={
"spark": "SELECT FROM_UTC_TIMESTAMP(CAST('2012-10-31 00:00' AS TIMESTAMP), 'America/Sao_Paulo')",
"presto": "SELECT CAST('2012-10-31 00:00' AS TIMESTAMP) AT TIME ZONE 'America/Sao_Paulo'",
},
)
def test_ddl(self):
self.validate_all(
"CREATE TABLE test WITH (FORMAT = 'PARQUET') AS SELECT 1",
@@ -314,6 +360,11 @@ class TestPresto(Validator):
def test_presto(self):
self.validate_identity("SELECT BOOL_OR(a > 10) FROM asd AS T(a)")
self.validate_identity("SELECT * FROM (VALUES (1))")
self.validate_identity("START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE")
self.validate_identity("START TRANSACTION ISOLATION LEVEL REPEATABLE READ")
self.validate_identity("APPROX_PERCENTILE(a, b, c, d)")
self.validate_all(
'SELECT a."b" FROM "foo"',
write={
@@ -455,10 +506,6 @@ class TestPresto(Validator):
"spark": UnsupportedError,
},
)
self.validate_identity("SELECT * FROM (VALUES (1))")
self.validate_identity("START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE")
self.validate_identity("START TRANSACTION ISOLATION LEVEL REPEATABLE READ")
self.validate_identity("APPROX_PERCENTILE(a, b, c, d)")
def test_encode_decode(self):
self.validate_all(
@@ -529,3 +576,27 @@ class TestPresto(Validator):
"presto": "FROM_HEX(x)",
},
)
def test_json(self):
self.validate_all(
"SELECT CAST(JSON '[1,23,456]' AS ARRAY(INTEGER))",
write={
"spark": "SELECT FROM_JSON('[1,23,456]', 'ARRAY<INT>')",
"presto": "SELECT CAST(CAST('[1,23,456]' AS JSON) AS ARRAY(INTEGER))",
},
)
self.validate_all(
"""SELECT CAST(JSON '{"k1":1,"k2":23,"k3":456}' AS MAP(VARCHAR, INTEGER))""",
write={
"spark": 'SELECT FROM_JSON(\'{"k1":1,"k2":23,"k3":456}\', \'MAP<STRING, INT>\')',
"presto": 'SELECT CAST(CAST(\'{"k1":1,"k2":23,"k3":456}\' AS JSON) AS MAP(VARCHAR, INTEGER))',
},
)
self.validate_all(
"SELECT CAST(ARRAY [1, 23, 456] AS JSON)",
write={
"spark": "SELECT TO_JSON(ARRAY(1, 23, 456))",
"presto": "SELECT CAST(ARRAY[1, 23, 456] AS JSON)",
},
)
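The new test_json cases map Presto's JSON casts onto Spark's FROM_JSON/TO_JSON with equivalent schema strings. A sketch based on the first assertion above:

import sqlglot

sql = "SELECT CAST(JSON '[1,23,456]' AS ARRAY(INTEGER))"
assert (sqlglot.transpile(sql, read="presto", write="spark")[0]
        == "SELECT FROM_JSON('[1,23,456]', 'ARRAY<INT>')")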

@@ -212,6 +212,17 @@ TBLPROPERTIES (
self.validate_identity("TRIM(BOTH 'SL' FROM 'SSparkSQLS')")
self.validate_identity("TRIM(LEADING 'SL' FROM 'SSparkSQLS')")
self.validate_identity("TRIM(TRAILING 'SL' FROM 'SSparkSQLS')")
self.validate_all(
"AGGREGATE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)",
write={
"trino": "REDUCE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)",
"duckdb": "REDUCE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)",
"hive": "REDUCE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)",
"presto": "REDUCE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)",
"spark": "AGGREGATE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)",
},
)
self.validate_all(
"TRIM('SL', 'SSparkSQLS')", write={"spark": "TRIM('SL' FROM 'SSparkSQLS')"}
)
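Spark's AGGREGATE and the REDUCE of Trino/DuckDB/Hive/Presto are the same higher-order fold, as the new test pins down. A sketch:

import sqlglot

sql = "AGGREGATE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)"
assert (sqlglot.transpile(sql, read="spark", write="presto")[0]
        == "REDUCE(my_arr, 0, (acc, x) -> acc + x, s -> s * 2)")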

@@ -92,3 +92,9 @@ class TestSQLite(Validator):
"sqlite": "SELECT FIRST_VALUE(Name) OVER (PARTITION BY AlbumId ORDER BY Bytes DESC) AS LargestTrack FROM tracks"
},
)
def test_longvarchar_dtype(self):
self.validate_all(
"CREATE TABLE foo (bar LONGVARCHAR)",
write={"sqlite": "CREATE TABLE foo (bar TEXT)"},
)
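SQLite has no LONGVARCHAR storage class, so the type is rewritten to TEXT. A one-line check mirroring the test (the read dialect here is assumed to be sqlite, as in the Validator above):

import sqlglot

assert (sqlglot.transpile("CREATE TABLE foo (bar LONGVARCHAR)", read="sqlite", write="sqlite")[0]
        == "CREATE TABLE foo (bar TEXT)")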

@@ -21,3 +21,6 @@ class TestTeradata(Validator):
"mysql": "UPDATE A SET col2 = '' FROM schema.tableA AS A, (SELECT col1 FROM schema.tableA GROUP BY col1) AS B WHERE A.col1 = B.col1",
},
)
def test_create(self):
self.validate_identity("CREATE TABLE x (y INT) PRIMARY INDEX (y) PARTITION BY y INDEX (y)")

@@ -161,6 +161,7 @@ SELECT 1 FROM test
SELECT * FROM a, b, (SELECT 1) AS c
SELECT a FROM test
SELECT 1 AS filter
SELECT 1 AS "quoted alias"
SELECT SUM(x) AS filter
SELECT 1 AS range FROM test
SELECT 1 AS count FROM test
@@ -264,7 +265,9 @@ SELECT a FROM test GROUP BY GROUPING SETS (x, ())
SELECT a FROM test GROUP BY GROUPING SETS (x, (x, y), (x, y, z), q)
SELECT a FROM test GROUP BY CUBE (x)
SELECT a FROM test GROUP BY ROLLUP (x)
SELECT a FROM test GROUP BY CUBE (x) ROLLUP (x, y, z)
SELECT t.a FROM test AS t GROUP BY ROLLUP (t.x)
SELECT a FROM test GROUP BY GROUPING SETS ((x, y)), ROLLUP (b)
SELECT a FROM test GROUP BY CUBE (x), ROLLUP (x, y, z)
SELECT CASE WHEN a < b THEN 1 WHEN a < c THEN 2 ELSE 3 END FROM test
SELECT CASE 1 WHEN 1 THEN 1 ELSE 2 END
SELECT CASE 1 WHEN 1 THEN MAP('a', 'b') ELSE MAP('b', 'c') END['a']
@@ -339,7 +342,6 @@ SELECT CAST(a AS ARRAY<INT>) FROM test
SELECT CAST(a AS VARIANT) FROM test
SELECT TRY_CAST(a AS INT) FROM test
SELECT COALESCE(a, b, c) FROM test
SELECT IFNULL(a, b) FROM test
SELECT ANY_VALUE(a) FROM test
SELECT 1 FROM a JOIN b ON a.x = b.x
SELECT 1 FROM a JOIN b AS c ON a.x = b.x
@@ -510,6 +512,14 @@ CREATE TABLE z (a INT UNIQUE AUTO_INCREMENT)
CREATE TABLE z (a INT REFERENCES parent(b, c))
CREATE TABLE z (a INT PRIMARY KEY, b INT REFERENCES foo(id))
CREATE TABLE z (a INT, FOREIGN KEY (a) REFERENCES parent(b, c))
CREATE TABLE foo (bar INT REFERENCES baz(baz_id) ON DELETE NO ACTION)
CREATE TABLE foo (bar INT REFERENCES baz(baz_id) ON DELETE CASCADE)
CREATE TABLE foo (bar INT REFERENCES baz(baz_id) ON DELETE SET NULL)
CREATE TABLE foo (bar INT REFERENCES baz(baz_id) ON DELETE SET DEFAULT)
CREATE TABLE foo (bar INT REFERENCES baz(baz_id) ON UPDATE NO ACTION)
CREATE TABLE foo (bar INT REFERENCES baz(baz_id) ON UPDATE CASCADE)
CREATE TABLE foo (bar INT REFERENCES baz(baz_id) ON UPDATE SET NULL)
CREATE TABLE foo (bar INT REFERENCES baz(baz_id) ON UPDATE SET DEFAULT)
CREATE TABLE asd AS SELECT asd FROM asd WITH NO DATA
CREATE TABLE asd AS SELECT asd FROM asd WITH DATA
CREATE TABLE products (x INT GENERATED BY DEFAULT AS IDENTITY)
@@ -526,6 +536,7 @@ CREATE TABLE a, DUAL JOURNAL, DUAL AFTER JOURNAL, MERGEBLOCKRATIO=1 PERCENT, DAT
CREATE TABLE a, DUAL BEFORE JOURNAL, LOCAL AFTER JOURNAL, MAXIMUM DATABLOCKSIZE, BLOCKCOMPRESSION=AUTOTEMP(c1 INT) (a INT)
CREATE SET GLOBAL TEMPORARY TABLE a, NO BEFORE JOURNAL, NO AFTER JOURNAL, MINIMUM DATABLOCKSIZE, BLOCKCOMPRESSION=NEVER (a INT)
CREATE MULTISET VOLATILE TABLE a, NOT LOCAL AFTER JOURNAL, FREESPACE=1 PERCENT, DATABLOCKSIZE=10 BYTES, WITH NO CONCURRENT ISOLATED LOADING FOR ALL (a INT)
CREATE ALGORITHM=UNDEFINED DEFINER=foo@% SQL SECURITY DEFINER VIEW a AS (SELECT a FROM b)
CREATE TEMPORARY TABLE x AS SELECT a FROM d
CREATE TEMPORARY TABLE IF NOT EXISTS x AS SELECT a FROM d
CREATE VIEW x AS SELECT a FROM b
@@ -555,6 +566,7 @@ CREATE UNIQUE INDEX IF NOT EXISTS my_idx ON tbl (a, b)
CREATE SCHEMA x
CREATE SCHEMA IF NOT EXISTS y
CREATE PROCEDURE IF NOT EXISTS a.b.c() AS 'DECLARE BEGIN; END'
CREATE OR REPLACE STAGE
DESCRIBE x
DROP INDEX a.b.c
DROP FUNCTION a.b.c (INT)
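Each line added to identity.sql above (the quoted alias, the ON DELETE / ON UPDATE referential actions, CREATE OR REPLACE STAGE, and so on) must survive a default-dialect round trip unchanged, which is what test_identity in the transpile tests below checks. The equivalent by hand:

import sqlglot

sql = "CREATE TABLE foo (bar INT REFERENCES baz(baz_id) ON DELETE CASCADE)"
assert sqlglot.transpile(sql)[0] == sql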

@@ -50,6 +50,10 @@ WITH cte AS (SELECT 1 AS x, 2 AS y) SELECT cte.x AS x, cte.y AS y FROM cte AS ct
(SELECT a FROM (SELECT b FROM x)) UNION (SELECT a FROM (SELECT b FROM y));
WITH cte AS (SELECT b FROM x), cte_2 AS (SELECT a FROM cte AS cte), cte_3 AS (SELECT b FROM y), cte_4 AS (SELECT a FROM cte_3 AS cte_3) (SELECT cte_2.a AS a FROM cte_2 AS cte_2) UNION (SELECT cte_4.a AS a FROM cte_4 AS cte_4);
-- Three unions
SELECT a FROM x UNION ALL SELECT a FROM y UNION ALL SELECT a FROM z;
WITH cte AS (SELECT a FROM x), cte_2 AS (SELECT a FROM y), cte_3 AS (SELECT a FROM z), cte_4 AS (SELECT cte_2.a AS a FROM cte_2 AS cte_2 UNION ALL SELECT cte_3.a AS a FROM cte_3 AS cte_3) SELECT cte.a AS a FROM cte AS cte UNION ALL SELECT cte_4.a AS a FROM cte_4 AS cte_4;
-- Subquery
SELECT a FROM x WHERE b = (SELECT y.c FROM y);
SELECT a FROM x WHERE b = (SELECT y.c FROM y);
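These fixture pairs are an input query followed by its expected rewrite, with derived tables and union branches lifted into named CTEs. A sketch, assuming this fixture drives sqlglot's eliminate_subqueries optimizer rule (the rule name is an inference, not stated in the diff):

from sqlglot import parse_one
from sqlglot.optimizer.eliminate_subqueries import eliminate_subqueries

sql = "SELECT a FROM x UNION ALL SELECT a FROM y UNION ALL SELECT a FROM z"
# Each union branch becomes its own CTE, and nested unions get a CTE of their own.
print(eliminate_subqueries(parse_one(sql)).sql())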

@@ -99,7 +99,7 @@ WITH cte1 AS (
GROUPING SETS (
a,
(b, c)
)
),
CUBE (
y,
z
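The fixture fix above adds the missing comma between the GROUPING SETS (...) and CUBE (...) groups in pretty-printed output. A sketch of how pretty printing is driven (the query here is illustrative, not the fixture's full input):

import sqlglot

sql = "SELECT a FROM t GROUP BY GROUPING SETS (a, (b, c)), CUBE (y, z)"
print(sqlglot.transpile(sql, pretty=True)[0])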

@@ -62,6 +62,16 @@ class TestBuild(unittest.TestCase):
lambda: select("x").from_("tbl").where("x > 0").where("x < 9", append=False),
"SELECT x FROM tbl WHERE x < 9",
),
(
lambda: select("x").from_("tbl").where("x > 0").lock(),
"SELECT x FROM tbl WHERE x > 0 FOR UPDATE",
"mysql",
),
(
lambda: select("x").from_("tbl").where("x > 0").lock(update=False),
"SELECT x FROM tbl WHERE x > 0 FOR SHARE",
"postgres",
),
(
lambda: select("x", "y").from_("tbl").group_by("x"),
"SELECT x, y FROM tbl GROUP BY x",

@@ -466,6 +466,7 @@ class TestExpressions(unittest.TestCase):
self.assertIsInstance(parse_one("BEGIN DEFERRED TRANSACTION"), exp.Transaction)
self.assertIsInstance(parse_one("COMMIT"), exp.Commit)
self.assertIsInstance(parse_one("ROLLBACK"), exp.Rollback)
self.assertIsInstance(parse_one("GENERATE_SERIES(a, b, c)"), exp.GenerateSeries)
def test_column(self):
dot = parse_one("a.b.c")
@@ -630,6 +631,19 @@ FROM foo""",
FROM foo""",
)
def test_to_interval(self):
self.assertEqual(exp.to_interval("1day").sql(), "INTERVAL '1' day")
self.assertEqual(exp.to_interval(" 5 months").sql(), "INTERVAL '5' months")
with self.assertRaises(ValueError):
exp.to_interval("bla")
self.assertEqual(exp.to_interval(exp.Literal.string("1day")).sql(), "INTERVAL '1' day")
self.assertEqual(
exp.to_interval(exp.Literal.string(" 5 months")).sql(), "INTERVAL '5' months"
)
with self.assertRaises(ValueError):
exp.to_interval(exp.Literal.string("bla"))
def test_to_table(self):
table_only = exp.to_table("table_name")
self.assertEqual(table_only.name, "table_name")
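exp.to_interval converts loose interval strings (or string literals) into proper Interval nodes, raising ValueError on anything it cannot parse. Usage, straight from the assertions above:

from sqlglot import exp

assert exp.to_interval("1day").sql() == "INTERVAL '1' day"
assert exp.to_interval(exp.Literal.string(" 5 months")).sql() == "INTERVAL '5' months"
# exp.to_interval("bla") raises ValueError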

@@ -326,12 +326,12 @@ FROM bar /* comment 5 */, tbl /* comment 6 */""",
self.validate("TIME_TO_UNIX(x)", "EPOCH(x)", write="duckdb")
self.validate(
"UNIX_TO_STR(123, 'y')",
"STRFTIME(TO_TIMESTAMP(CAST(123 AS BIGINT)), 'y')",
"STRFTIME(TO_TIMESTAMP(123), 'y')",
write="duckdb",
)
self.validate(
"UNIX_TO_TIME(123)",
"TO_TIMESTAMP(CAST(123 AS BIGINT))",
"TO_TIMESTAMP(123)",
write="duckdb",
)
@@ -426,6 +426,9 @@ FROM bar /* comment 5 */, tbl /* comment 6 */""",
mock_logger.warning.assert_any_call("Applying array index offset (%s)", 1)
mock_logger.warning.assert_any_call("Applying array index offset (%s)", -1)
def test_identify_lambda(self):
self.validate("x(y -> y)", 'X("y" -> "y")', identify=True)
def test_identity(self):
self.assertEqual(transpile("")[0], "")
for sql in load_sql_fixtures("identity.sql"):