
Merging upstream version 16.2.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 16:00:51 +01:00
parent c12f551e31
commit 718a80b164
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
106 changed files with 41940 additions and 40162 deletions

tests/dialects/test_bigquery.py

@@ -6,6 +6,9 @@ class TestBigQuery(Validator):
dialect = "bigquery"
def test_bigquery(self):
self.validate_identity("DATE(2016, 12, 25)")
self.validate_identity("DATE(CAST('2016-12-25 23:59:59' AS DATETIME))")
self.validate_identity("SELECT foo IN UNNEST(bar) AS bla")
self.validate_identity("SELECT * FROM x-0.a")
self.validate_identity("SELECT * FROM pivot CROSS JOIN foo")
self.validate_identity("SAFE_CAST(x AS STRING)")
@@ -27,6 +30,9 @@ class TestBigQuery(Validator):
self.validate_identity("SELECT * FROM q UNPIVOT(values FOR quarter IN (b, c))")
self.validate_identity("""CREATE TABLE x (a STRUCT<values ARRAY<INT64>>)""")
self.validate_identity("""CREATE TABLE x (a STRUCT<b STRING OPTIONS (description='b')>)""")
self.validate_identity(
"DATE(CAST('2016-12-25 05:30:00+07' AS DATETIME), 'America/Los_Angeles')"
)
self.validate_identity(
"""CREATE TABLE x (a STRING OPTIONS (description='x')) OPTIONS (table_expiration_days=1)"""
)
@@ -37,6 +43,19 @@ class TestBigQuery(Validator):
"CREATE TABLE IF NOT EXISTS foo AS SELECT * FROM bla EXCEPT DISTINCT (SELECT * FROM bar) LIMIT 0"
)
self.validate_all("SELECT SPLIT(foo)", write={"bigquery": "SELECT SPLIT(foo, ',')"})
self.validate_all(
"cast(x as date format 'MM/DD/YYYY')",
write={
"bigquery": "PARSE_DATE('%m/%d/%Y', x)",
},
)
self.validate_all(
"cast(x as time format 'YYYY.MM.DD HH:MI:SSTZH')",
write={
"bigquery": "PARSE_TIMESTAMP('%Y.%m.%d %I:%M:%S%z', x)",
},
)
self.validate_all("SELECT 1 AS hash", write={"bigquery": "SELECT 1 AS `hash`"})
self.validate_all('x <> ""', write={"bigquery": "x <> ''"})
self.validate_all('x <> """"""', write={"bigquery": "x <> ''"})
@@ -55,11 +74,12 @@ class TestBigQuery(Validator):
"SELECT * FROM `my-project.my-dataset.my-table`",
write={"bigquery": "SELECT * FROM `my-project`.`my-dataset`.`my-table`"},
)
self.validate_all("CAST(x AS DATETIME)", read={"": "x::timestamp"})
self.validate_identity("CAST(x AS TIMESTAMP)")
self.validate_all("LEAST(x, y)", read={"sqlite": "MIN(x, y)"})
self.validate_all("CAST(x AS CHAR)", write={"bigquery": "CAST(x AS STRING)"})
self.validate_all("CAST(x AS NCHAR)", write={"bigquery": "CAST(x AS STRING)"})
self.validate_all("CAST(x AS NVARCHAR)", write={"bigquery": "CAST(x AS STRING)"})
self.validate_all("CAST(x AS TIMESTAMP)", write={"bigquery": "CAST(x AS DATETIME)"})
self.validate_all("CAST(x AS TIMESTAMPTZ)", write={"bigquery": "CAST(x AS TIMESTAMP)"})
self.validate_all("CAST(x AS RECORD)", write={"bigquery": "CAST(x AS STRUCT)"})
self.validate_all(
@@ -418,9 +438,11 @@ class TestBigQuery(Validator):
self.validate_all(
"SELECT REGEXP_EXTRACT(abc, 'pattern(group)') FROM table",
write={
"bigquery": "SELECT REGEXP_EXTRACT(abc, 'pattern(group)') FROM table",
"duckdb": "SELECT REGEXP_EXTRACT(abc, 'pattern(group)', 1) FROM table",
},
)
self.validate_identity("REGEXP_EXTRACT(`foo`, 'bar: (.+?)', 1, 1)")
self.validate_identity("BEGIN A B C D E F")
self.validate_identity("BEGIN TRANSACTION")
self.validate_identity("COMMIT TRANSACTION")

tests/dialects/test_clickhouse.py

@@ -45,7 +45,21 @@ class TestClickhouse(Validator):
self.validate_identity(
"CREATE MATERIALIZED VIEW test_view ON CLUSTER cl1 (id UInt8) ENGINE=AggregatingMergeTree() ORDER BY tuple() AS SELECT * FROM test_data"
)
self.validate_identity(
"CREATE MATERIALIZED VIEW test_view ON CLUSTER cl1 (id UInt8) TO table1 AS SELECT * FROM test_data"
)
self.validate_identity(
"CREATE MATERIALIZED VIEW test_view (id UInt8) TO db.table1 AS SELECT * FROM test_data"
)
self.validate_all(
"CONCAT(CASE WHEN COALESCE(a, '') IS NULL THEN COALESCE(a, '') ELSE CAST(COALESCE(a, '') AS TEXT) END, CASE WHEN COALESCE(b, '') IS NULL THEN COALESCE(b, '') ELSE CAST(COALESCE(b, '') AS TEXT) END)",
read={"postgres": "CONCAT(a, b)"},
)
self.validate_all(
"CONCAT(CASE WHEN a IS NULL THEN a ELSE CAST(a AS TEXT) END, CASE WHEN b IS NULL THEN b ELSE CAST(b AS TEXT) END)",
read={"mysql": "CONCAT(a, b)"},
)
self.validate_all(
r"'Enum8(\'Sunday\' = 0)'", write={"clickhouse": "'Enum8(''Sunday'' = 0)'"}
)
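Note (illustrative, not part of this commit): the two new CONCAT tests encode dialect-specific NULL semantics. A minimal sketch, assuming sqlglot 16.2.1:

import sqlglot

# Postgres CONCAT ignores NULL arguments, so each one is coalesced to ''
# before the cast; MySQL CONCAT propagates NULL, so no COALESCE is added.
print(sqlglot.transpile("CONCAT(a, b)", read="postgres", write="clickhouse")[0])
print(sqlglot.transpile("CONCAT(a, b)", read="mysql", write="clickhouse")[0])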

tests/dialects/test_dialect.py

@@ -1084,6 +1084,14 @@ class TestDialect(Validator):
self.validate_identity("some.column LIKE 'foo' + another.column + 'bar'")
self.validate_all("LIKE(x, 'z')", write={"": "'z' LIKE x"})
self.validate_all(
"CONCAT(a, b, c)",
write={
"": "CONCAT(a, b, c)",
"redshift": "a || b || c",
"sqlite": "a || b || c",
},
)
self.validate_all(
"x ILIKE '%y'",
read={
@@ -1177,10 +1185,21 @@ class TestDialect(Validator):
self.validate_all(
"CONCAT(a)",
write={
"mysql": "a",
"clickhouse": "a",
"presto": "a",
"trino": "a",
"tsql": "a",
},
)
self.validate_all(
"COALESCE(a, '')",
read={
"drill": "CONCAT(a)",
"duckdb": "CONCAT(a)",
"postgres": "CONCAT(a)",
"tsql": "CONCAT(a)",
},
)
self.validate_all(
"IF(x > 1, 1, 0)",
write={
@@ -1276,7 +1295,7 @@ class TestDialect(Validator):
def test_limit(self):
self.validate_all(
"SELECT * FROM data LIMIT 10, 20",
write={"sqlite": "SELECT * FROM data LIMIT 10 OFFSET 20"},
write={"sqlite": "SELECT * FROM data LIMIT 20 OFFSET 10"},
)
self.validate_all(
"SELECT x FROM y LIMIT 10",

tests/dialects/test_duckdb.py

@@ -9,6 +9,20 @@ class TestDuckDB(Validator):
self.validate_identity("SELECT CURRENT_DATE")
self.validate_identity("SELECT CURRENT_TIMESTAMP")
self.validate_all(
"SELECT CAST('2020-01-01' AS DATE) + INTERVAL (-1) DAY",
read={"mysql": "SELECT DATE '2020-01-01' + INTERVAL -1 DAY"},
)
self.validate_all(
"SELECT INTERVAL '1 quarter'",
write={"duckdb": "SELECT (90 * INTERVAL '1' day)"},
)
self.validate_all(
"SELECT ((DATE_TRUNC('DAY', CAST(CAST(DATE_TRUNC('DAY', CURRENT_TIMESTAMP) AS DATE) AS TIMESTAMP) + INTERVAL (0 - MOD((DAYOFWEEK(CAST(CAST(DATE_TRUNC('DAY', CURRENT_TIMESTAMP) AS DATE) AS TIMESTAMP)) % 7) - 1 + 7, 7)) day) + (7 * INTERVAL (-5) day))) AS t1",
read={
"presto": "SELECT ((DATE_ADD('week', -5, DATE_TRUNC('DAY', DATE_ADD('day', (0 - MOD((DAY_OF_WEEK(CAST(CAST(DATE_TRUNC('DAY', NOW()) AS DATE) AS TIMESTAMP)) % 7) - 1 + 7, 7)), CAST(CAST(DATE_TRUNC('DAY', NOW()) AS DATE) AS TIMESTAMP)))))) AS t1",
},
)
self.validate_all(
"EPOCH(x)",
read={
@@ -51,7 +65,7 @@ class TestDuckDB(Validator):
self.validate_all(
"STRPTIME(x, '%y-%-m')",
write={
"bigquery": "PARSE_TIMESTAMP('%y-%m', x)",
"bigquery": "PARSE_TIMESTAMP('%y-%-m', x)",
"duckdb": "STRPTIME(x, '%y-%-m')",
"presto": "DATE_PARSE(x, '%y-%c')",
"hive": "CAST(FROM_UNIXTIME(UNIX_TIMESTAMP(x, 'yy-M')) AS TIMESTAMP)",
@@ -70,7 +84,7 @@ class TestDuckDB(Validator):
self.validate_all(
"STRPTIME(x, '%-m/%-d/%y %-I:%M %p')",
write={
"bigquery": "PARSE_TIMESTAMP('%m/%d/%y %I:%M %p', x)",
"bigquery": "PARSE_TIMESTAMP('%-m/%-d/%y %-I:%M %p', x)",
"duckdb": "STRPTIME(x, '%-m/%-d/%y %-I:%M %p')",
"presto": "DATE_PARSE(x, '%c/%e/%y %l:%i %p')",
"hive": "CAST(FROM_UNIXTIME(UNIX_TIMESTAMP(x, 'M/d/yy h:mm a')) AS TIMESTAMP)",

tests/dialects/test_mysql.py

@@ -45,6 +45,8 @@ class TestMySQL(Validator):
)
def test_identity(self):
self.validate_identity("CAST(x AS ENUM('a', 'b'))")
self.validate_identity("CAST(x AS SET('a', 'b'))")
self.validate_identity("SELECT CURRENT_TIMESTAMP(6)")
self.validate_identity("x ->> '$.name'")
self.validate_identity("SELECT CAST(`a`.`b` AS INT) FROM foo")

tests/dialects/test_oracle.py

@@ -26,8 +26,8 @@ class TestOracle(Validator):
self.validate_all(
"NVL(NULL, 1)",
write={
"": "COALESCE(NULL, 1)",
"oracle": "NVL(NULL, 1)",
"": "IFNULL(NULL, 1)",
},
)
self.validate_all(
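Note on the expectation change above: NVL is now treated as two-argument IFNULL rather than variadic COALESCE when rendered with the default dialect. Illustrative sketch, assuming sqlglot 16.2.1:

import sqlglot

# Oracle's NVL maps to IFNULL in the default dialect's output.
print(sqlglot.parse_one("NVL(NULL, 1)", read="oracle").sql())
# IFNULL(NULL, 1)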

tests/dialects/test_postgres.py

@@ -1,3 +1,5 @@
from unittest import mock
from sqlglot import ParseError, exp, parse_one, transpile
from tests.dialects.test_dialect import Validator
@@ -85,6 +87,39 @@ class TestPostgres(Validator):
read="postgres",
)
def test_unnest(self):
self.validate_identity(
"SELECT * FROM UNNEST(ARRAY[1, 2], ARRAY['foo', 'bar', 'baz']) AS x(a, b)"
)
self.validate_all(
"SELECT UNNEST(c) FROM t",
write={
"hive": "SELECT EXPLODE(c) FROM t",
"postgres": "SELECT UNNEST(c) FROM t",
"presto": "SELECT col FROM t CROSS JOIN UNNEST(c) AS _u(col)",
},
)
self.validate_all(
"SELECT UNNEST(ARRAY[1])",
write={
"hive": "SELECT EXPLODE(ARRAY(1))",
"postgres": "SELECT UNNEST(ARRAY[1])",
"presto": "SELECT col FROM UNNEST(ARRAY[1]) AS _u(col)",
},
)
@mock.patch("sqlglot.helper.logger")
def test_array_offset(self, mock_logger):
self.validate_all(
"SELECT col[1]",
write={
"hive": "SELECT col[0]",
"postgres": "SELECT col[1]",
"presto": "SELECT col[1]",
},
)
def test_postgres(self):
self.validate_identity("CAST(x AS INT4RANGE)")
self.validate_identity("CAST(x AS INT4MULTIRANGE)")
@@ -540,3 +575,24 @@ class TestPostgres(Validator):
"SELECT a, LOGICAL_OR(b) FROM table GROUP BY a",
write={"postgres": "SELECT a, BOOL_OR(b) FROM table GROUP BY a"},
)
def test_string_concat(self):
self.validate_all(
"CONCAT(a, b)",
write={
"": "CONCAT(COALESCE(a, ''), COALESCE(b, ''))",
"duckdb": "CONCAT(COALESCE(a, ''), COALESCE(b, ''))",
"postgres": "CONCAT(COALESCE(a, ''), COALESCE(b, ''))",
"presto": "CONCAT(CAST(COALESCE(a, '') AS VARCHAR), CAST(COALESCE(b, '') AS VARCHAR))",
},
)
self.validate_all(
"a || b",
write={
"": "a || b",
"clickhouse": "CONCAT(CAST(a AS TEXT), CAST(b AS TEXT))",
"duckdb": "a || b",
"postgres": "a || b",
"presto": "CONCAT(CAST(a AS VARCHAR), CAST(b AS VARCHAR))",
},
)
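Note (illustrative, not part of this commit): the new test_string_concat cases make the semantic split explicit. Postgres CONCAT coalesces NULLs to empty strings, while || keeps NULL propagation and only gains casts where the target needs them.

import sqlglot

# CONCAT from Postgres coalesces each argument; plain || does not.
print(sqlglot.transpile("CONCAT(a, b)", read="postgres", write="presto")[0])
# CONCAT(CAST(COALESCE(a, '') AS VARCHAR), CAST(COALESCE(b, '') AS VARCHAR))
print(sqlglot.transpile("a || b", read="postgres", write="presto")[0])
# CONCAT(CAST(a AS VARCHAR), CAST(b AS VARCHAR))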

tests/dialects/test_presto.py

@@ -440,6 +440,8 @@ class TestPresto(Validator):
)
def test_presto(self):
self.validate_identity("SELECT * FROM x OFFSET 1 LIMIT 1")
self.validate_identity("SELECT * FROM x OFFSET 1 FETCH FIRST 1 ROWS ONLY")
self.validate_identity("SELECT BOOL_OR(a > 10) FROM asd AS T(a)")
self.validate_identity("SELECT * FROM (VALUES (1))")
self.validate_identity("START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE")

tests/dialects/test_snowflake.py

@@ -30,6 +30,10 @@ class TestSnowflake(Validator):
self.validate_identity("SELECT CONVERT_TIMEZONE('UTC', 'America/Los_Angeles', col)")
self.validate_all("CAST(x AS CHAR VARYING)", write={"snowflake": "CAST(x AS VARCHAR)"})
self.validate_all(
"SELECT * FROM (VALUES (0) foo(bar))",
write={"snowflake": "SELECT * FROM (VALUES (0)) AS foo(bar)"},
)
self.validate_all("CAST(x AS CHARACTER VARYING)", write={"snowflake": "CAST(x AS VARCHAR)"})
self.validate_all("CAST(x AS NCHAR VARYING)", write={"snowflake": "CAST(x AS VARCHAR)"})
self.validate_all(
@@ -274,8 +278,8 @@ class TestSnowflake(Validator):
"SELECT TO_TIMESTAMP('2013-04-05 01:02:03')",
write={
"bigquery": "SELECT PARSE_TIMESTAMP('%Y-%m-%d %H:%M:%S', '2013-04-05 01:02:03')",
"snowflake": "SELECT TO_TIMESTAMP('2013-04-05 01:02:03', 'yyyy-mm-dd hh24:mi:ss')",
"spark": "SELECT TO_TIMESTAMP('2013-04-05 01:02:03', 'yyyy-MM-d HH:mm:ss')",
"snowflake": "SELECT TO_TIMESTAMP('2013-04-05 01:02:03', 'yyyy-mm-DD hh24:mi:ss')",
"spark": "SELECT TO_TIMESTAMP('2013-04-05 01:02:03', 'yyyy-MM-dd HH:mm:ss')",
},
)
self.validate_all(
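Note on the TO_TIMESTAMP fix above: the old Spark format 'yyyy-MM-d' dropped a digit from the day-of-month token. Illustrative check of the corrected mapping, assuming sqlglot 16.2.1:

import sqlglot

# Spark's day-of-month token is now the two-digit 'dd'.
print(sqlglot.transpile("SELECT TO_TIMESTAMP('2013-04-05 01:02:03')", read="snowflake", write="spark")[0])
# SELECT TO_TIMESTAMP('2013-04-05 01:02:03', 'yyyy-MM-dd HH:mm:ss')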

tests/dialects/test_teradata.py

@@ -138,11 +138,15 @@ class TestTeradata(Validator):
def test_cast(self):
self.validate_all(
"CAST('1992-01' AS DATE FORMAT 'YYYY-DD')",
+read={
+"bigquery": "CAST('1992-01' AS DATE FORMAT 'YYYY-DD')",
+},
write={
"teradata": "CAST('1992-01' AS DATE FORMAT 'YYYY-DD')",
-"databricks": "DATE_FORMAT('1992-01', 'YYYY-DD')",
-"mysql": "DATE_FORMAT('1992-01', 'YYYY-DD')",
-"spark": "DATE_FORMAT('1992-01', 'YYYY-DD')",
-"": "TIME_TO_STR('1992-01', 'YYYY-DD')",
+"bigquery": "PARSE_DATE('%Y-%d', '1992-01')",
+"databricks": "TO_DATE('1992-01', 'yyyy-dd')",
+"mysql": "STR_TO_DATE('1992-01', '%Y-%d')",
+"spark": "TO_DATE('1992-01', 'yyyy-dd')",
+"": "STR_TO_DATE('1992-01', '%Y-%d')",
},
)
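Note on the rewritten expectations above: DATE_FORMAT formats a date into a string, whereas CAST ... FORMAT parses a string into a date, so the new targets use parsing functions instead. Illustrative sketch, assuming sqlglot 16.2.1:

import sqlglot

# Teradata's CAST ... FORMAT now becomes a parse call in each target dialect.
print(sqlglot.transpile("CAST('1992-01' AS DATE FORMAT 'YYYY-DD')", read="teradata", write="bigquery")[0])
# PARSE_DATE('%Y-%d', '1992-01')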