Adding upstream version 26.8.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>

parent 4b797b16f0
commit 4c394df415

61 changed files with 43883 additions and 41898 deletions
@@ -865,6 +865,7 @@ LANGUAGE js AS
"presto": "SHA256(x)",
"trino": "SHA256(x)",
"postgres": "SHA256(x)",
"duckdb": "SHA256(x)",
},
write={
"bigquery": "SHA256(x)",

@@ -875,6 +876,7 @@ LANGUAGE js AS
"redshift": "SHA2(x, 256)",
"trino": "SHA256(x)",
"duckdb": "SHA256(x)",
"snowflake": "SHA2(x, 256)",
},
)
self.validate_all(

@@ -29,6 +29,10 @@ class TestClickhouse(Validator):
self.assertEqual(expr.sql(dialect="clickhouse"), "COUNT(x)")
self.assertIsNone(expr._meta)

self.validate_identity("SELECT 1 OR (1 = 2)")
self.validate_identity("SELECT 1 AND (1 = 2)")
self.validate_identity("SELECT json.a.:Int64")
self.validate_identity("SELECT json.a.:JSON.b.:Int64")
self.validate_identity("WITH arrayJoin([(1, [2, 3])]) AS arr SELECT arr")
self.validate_identity("CAST(1 AS Bool)")
self.validate_identity("SELECT toString(CHAR(104.1, 101, 108.9, 108.9, 111, 32))")

@@ -85,6 +89,7 @@ class TestClickhouse(Validator):
self.validate_identity("SELECT exponentialTimeDecayedAvg(60)(a, b)")
self.validate_identity("levenshteinDistance(col1, col2)", "editDistance(col1, col2)")
self.validate_identity("SELECT * FROM foo WHERE x GLOBAL IN (SELECT * FROM bar)")
self.validate_identity("SELECT * FROM foo WHERE x GLOBAL NOT IN (SELECT * FROM bar)")
self.validate_identity("POSITION(haystack, needle)")
self.validate_identity("POSITION(haystack, needle, position)")
self.validate_identity("CAST(x AS DATETIME)", "CAST(x AS DateTime)")

@@ -158,6 +163,21 @@ class TestClickhouse(Validator):
self.validate_identity(
"CREATE TABLE t (foo String CODEC(LZ4HC(9), ZSTD, DELTA), size String ALIAS formatReadableSize(size_bytes), INDEX idx1 a TYPE bloom_filter(0.001) GRANULARITY 1, INDEX idx2 a TYPE set(100) GRANULARITY 2, INDEX idx3 a TYPE minmax GRANULARITY 3)"
)
self.validate_identity(
"SELECT generate_series FROM generate_series(0, 10) AS g(x)",
)
self.validate_identity(
"SELECT and(1, 2)",
"SELECT 1 AND 2",
)
self.validate_identity(
"SELECT or(1, 2)",
"SELECT 1 OR 2",
)
self.validate_identity(
"SELECT generate_series FROM generate_series(0, 10) AS g",
"SELECT generate_series FROM generate_series(0, 10) AS g(generate_series)",
)
self.validate_identity(
"INSERT INTO tab VALUES ({'key1': 1, 'key2': 10}), ({'key1': 2, 'key2': 20}), ({'key1': 3, 'key2': 30})",
"INSERT INTO tab VALUES (map('key1', 1, 'key2', 10)), (map('key1', 2, 'key2', 20)), (map('key1', 3, 'key2', 30))",

@@ -179,6 +199,13 @@ class TestClickhouse(Validator):
"SELECT SUM(1) AS impressions FROM (SELECT ['Istanbul', 'Berlin', 'Bobruisk'] AS cities) WHERE arrayJoin(cities) IN ('Istanbul', 'Berlin')",
)

self.validate_all(
"SELECT CAST(STR_TO_DATE(SUBSTRING(a.eta, 1, 10), '%Y-%m-%d') AS Nullable(DATE))",
read={
"clickhouse": "SELECT CAST(STR_TO_DATE(SUBSTRING(a.eta, 1, 10), '%Y-%m-%d') AS Nullable(DATE))",
"oracle": "SELECT to_date(substr(a.eta, 1,10), 'YYYY-MM-DD')",
},
)
self.validate_all(
"CHAR(67) || CHAR(65) || CHAR(84)",
read={

@@ -201,13 +228,13 @@ class TestClickhouse(Validator):
},
)
self.validate_all(
"SELECT CAST(STR_TO_DATE('05 12 2000', '%d %m %Y') AS DATE)",
"SELECT CAST(STR_TO_DATE('05 12 2000', '%d %m %Y') AS Nullable(DATE))",
read={
"clickhouse": "SELECT CAST(STR_TO_DATE('05 12 2000', '%d %m %Y') AS DATE)",
"clickhouse": "SELECT CAST(STR_TO_DATE('05 12 2000', '%d %m %Y') AS Nullable(DATE))",
"postgres": "SELECT TO_DATE('05 12 2000', 'DD MM YYYY')",
},
write={
"clickhouse": "SELECT CAST(STR_TO_DATE('05 12 2000', '%d %m %Y') AS DATE)",
"clickhouse": "SELECT CAST(STR_TO_DATE('05 12 2000', '%d %m %Y') AS Nullable(DATE))",
"postgres": "SELECT CAST(CAST(TO_DATE('05 12 2000', 'DD MM YYYY') AS TIMESTAMP) AS DATE)",
},
)

@@ -226,9 +253,9 @@ class TestClickhouse(Validator):
},
)
self.validate_all(
"SELECT a, b FROM (SELECT * FROM x) AS t",
"SELECT a, b FROM (SELECT * FROM x) AS t(a, b)",
read={
"clickhouse": "SELECT a, b FROM (SELECT * FROM x) AS t",
"clickhouse": "SELECT a, b FROM (SELECT * FROM x) AS t(a, b)",
"duckdb": "SELECT a, b FROM (SELECT * FROM x) AS t(a, b)",
},
)

@@ -557,6 +584,7 @@ class TestClickhouse(Validator):
self.validate_identity(
"SELECT COUNT(1) FROM table SETTINGS additional_table_filters = {'a': 'b', 'c': 'd'}"
)
self.validate_identity("SELECT arrayConcat([1, 2], [3, 4])")

def test_clickhouse_values(self):
values = exp.select("*").from_(

@@ -682,6 +710,33 @@ class TestClickhouse(Validator):
with self.subTest(f"Casting to ClickHouse {data_type}"):
self.validate_identity(f"SELECT CAST(val AS {data_type})")

def test_aggregate_function_column_with_any_keyword(self):
# Regression test for https://github.com/tobymao/sqlglot/issues/4723
self.validate_all(
"""
CREATE TABLE my_db.my_table
(
someId UUID,
aggregatedColumn AggregateFunction(any, String),
aggregatedColumnWithParams AggregateFunction(any(somecolumn), String),
)
ENGINE = AggregatingMergeTree()
ORDER BY (someId)
""",
write={
"clickhouse": """CREATE TABLE my_db.my_table (
someId UUID,
aggregatedColumn AggregateFunction(any, String),
aggregatedColumnWithParams AggregateFunction(any(somecolumn), String)
)
ENGINE=AggregatingMergeTree()
ORDER BY (
someId
)""",
},
pretty=True,
)

def test_ddl(self):
db_table_expr = exp.Table(this=None, db=exp.to_identifier("foo"), catalog=None)
create_with_cluster = exp.Create(

@@ -1061,13 +1116,15 @@ LIFETIME(MIN 0 MAX 0)""",
CREATE TABLE t (
a AggregateFunction(quantiles(0.5, 0.9), UInt64),
b AggregateFunction(quantiles, UInt64),
c SimpleAggregateFunction(sum, Float64)
c SimpleAggregateFunction(sum, Float64),
d AggregateFunction(count)
)""",
write={
"clickhouse": """CREATE TABLE t (
a AggregateFunction(quantiles(0.5, 0.9), UInt64),
b AggregateFunction(quantiles, UInt64),
c SimpleAggregateFunction(sum, Float64)
c SimpleAggregateFunction(sum, Float64),
d AggregateFunction(count)
)"""
},
pretty=True,

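As an aside on the new ClickHouse assertions above: the and()/or() rewrites can also be reproduced outside the test harness through sqlglot's public transpile API. A minimal sketch (not part of the diff), assuming sqlglot 26.8.0:

import sqlglot

# ClickHouse and()/or() function calls round-trip to the AND/OR operators,
# mirroring the validate_identity expectations in the hunk above.
print(sqlglot.transpile("SELECT and(1, 2)", read="clickhouse", write="clickhouse")[0])
# expected, per the test above: SELECT 1 AND 2
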
@@ -168,6 +168,18 @@ class TestDialect(Validator):
self.assertFalse(snowflake_class in {"bigquery", "redshift"})
self.assertFalse(snowflake_object in {"bigquery", "redshift"})

def test_compare_dialect_versions(self):
ddb_v1 = Dialect.get_or_raise("duckdb, version=1.0")
ddb_v1_2 = Dialect.get_or_raise("duckdb, foo=bar, version=1.0")
ddb_v2 = Dialect.get_or_raise("duckdb, version=2.2.4")
ddb_latest = Dialect.get_or_raise("duckdb")

self.assertTrue(ddb_latest.version > ddb_v2.version)
self.assertTrue(ddb_v1.version < ddb_v2.version)

self.assertTrue(ddb_v1.version == ddb_v1_2.version)
self.assertTrue(ddb_latest.version == Dialect.get_or_raise("duckdb").version)

def test_cast(self):
self.validate_all(
"CAST(a AS TEXT)",

@@ -3015,7 +3027,7 @@ FROM subquery2""",
"databricks": "SELECT * FROM EXPLODE(SEQUENCE(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), INTERVAL '1' WEEK))",
"duckdb": "SELECT * FROM UNNEST(CAST(GENERATE_SERIES(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), (7 * INTERVAL '1' DAY)) AS DATE[]))",
"mysql": "WITH RECURSIVE _generated_dates(date_value) AS (SELECT CAST('2020-01-01' AS DATE) AS date_value UNION ALL SELECT CAST(DATE_ADD(date_value, INTERVAL 1 WEEK) AS DATE) FROM _generated_dates WHERE CAST(DATE_ADD(date_value, INTERVAL 1 WEEK) AS DATE) <= CAST('2020-02-01' AS DATE)) SELECT * FROM (SELECT date_value FROM _generated_dates) AS _generated_dates",
"postgres": "SELECT * FROM (SELECT CAST(value AS DATE) FROM GENERATE_SERIES(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), INTERVAL '1 WEEK') AS value) AS _unnested_generate_series",
"postgres": "SELECT * FROM (SELECT CAST(value AS DATE) FROM GENERATE_SERIES(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), INTERVAL '1 WEEK') AS _t(value)) AS _unnested_generate_series",
"presto": "SELECT * FROM UNNEST(SEQUENCE(CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE), (1 * INTERVAL '7' DAY)))",
"redshift": "WITH RECURSIVE _generated_dates(date_value) AS (SELECT CAST('2020-01-01' AS DATE) AS date_value UNION ALL SELECT CAST(DATEADD(WEEK, 1, date_value) AS DATE) FROM _generated_dates WHERE CAST(DATEADD(WEEK, 1, date_value) AS DATE) <= CAST('2020-02-01' AS DATE)) SELECT * FROM (SELECT date_value FROM _generated_dates) AS _generated_dates",
"snowflake": "SELECT * FROM (SELECT DATEADD(WEEK, CAST(value AS INT), CAST('2020-01-01' AS DATE)) AS value FROM TABLE(FLATTEN(INPUT => ARRAY_GENERATE_RANGE(0, (DATEDIFF(WEEK, CAST('2020-01-01' AS DATE), CAST('2020-02-01' AS DATE)) + 1 - 1) + 1))) AS _u(seq, key, path, index, value, this))",

@@ -3335,3 +3347,26 @@ FROM subquery2""",
"tsql": "SCHEMA_NAME()",
},
)

def test_integer_hex_strings(self):
# Hex strings such as 0xCC represent INTEGER values in the read dialects
integer_dialects = ("bigquery", "clickhouse")
for read_dialect in integer_dialects:
for write_dialect in (
"",
"duckdb",
"databricks",
"snowflake",
"spark",
"redshift",
):
with self.subTest(f"Testing hex string -> INTEGER evaluation for {read_dialect}"):
self.assertEqual(
parse_one("SELECT 0xCC", read=read_dialect).sql(write_dialect), "SELECT 204"
)

for other_integer_dialects in integer_dialects:
self.assertEqual(
parse_one("SELECT 0xCC", read=read_dialect).sql(other_integer_dialects),
"SELECT 0xCC",
)

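The version-pinned dialect strings exercised by test_compare_dialect_versions above are ordinary dialect specifiers, so they can be passed anywhere a dialect name is accepted. A minimal sketch (not part of the diff), assuming sqlglot 26.8.0; the COUNT_IF expectations mirror the versioned DuckDB writes in the test_duckdb.py hunk below:

import sqlglot
from sqlglot.dialects.dialect import Dialect

# "duckdb, version=1.0" is parsed by Dialect.get_or_raise, as in the test above.
assert Dialect.get_or_raise("duckdb, version=1.0").version < Dialect.get_or_raise("duckdb, version=2.2.4").version

# Older DuckDB versions predate COUNT_IF, so the generator falls back to SUM(CASE ...).
print(sqlglot.transpile("COUNT_IF(x)", read="duckdb", write="duckdb, version=1.0")[0])
print(sqlglot.transpile("COUNT_IF(x)", read="duckdb", write="duckdb, version=1.2")[0])
# expected, per the tests: SUM(CASE WHEN x THEN 1 ELSE 0 END) and COUNT_IF(x)
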
@@ -314,6 +314,10 @@ class TestDuckDB(Validator):
self.validate_identity(
"""SELECT '{ "family": "anatidae", "species": [ "duck", "goose", "swan", null ] }' ->> ['$.family', '$.species']""",
)
self.validate_identity(
"SELECT 20_000 AS literal",
"SELECT 20000 AS literal",
)
self.validate_identity(
"""SELECT JSON_EXTRACT_STRING('{ "family": "anatidae", "species": [ "duck", "goose", "swan", null ] }', ['$.family', '$.species'])""",
"""SELECT '{ "family": "anatidae", "species": [ "duck", "goose", "swan", null ] }' ->> ['$.family', '$.species']""",

@@ -403,6 +407,14 @@ class TestDuckDB(Validator):
self.validate_all("0x1010", write={"": "0 AS x1010"})
self.validate_all("x ~ y", write={"duckdb": "REGEXP_MATCHES(x, y)"})
self.validate_all("SELECT * FROM 'x.y'", write={"duckdb": 'SELECT * FROM "x.y"'})
self.validate_all(
"COUNT_IF(x)",
write={
"duckdb": "COUNT_IF(x)",
"duckdb, version=1.0": "SUM(CASE WHEN x THEN 1 ELSE 0 END)",
"duckdb, version=1.2": "COUNT_IF(x)",
},
)
self.validate_all(
"SELECT STRFTIME(CAST('2020-01-01' AS TIMESTAMP), CONCAT('%Y', '%m'))",
write={

@@ -410,6 +422,13 @@ class TestDuckDB(Validator):
"tsql": "SELECT FORMAT(CAST('2020-01-01' AS DATETIME2), CONCAT('yyyy', 'MM'))",
},
)
self.validate_all(
"""SELECT CAST('{"x": 1}' AS JSON)""",
read={
"duckdb": """SELECT '{"x": 1}'::JSON""",
"postgres": """SELECT '{"x": 1}'::JSONB""",
},
)
self.validate_all(
"SELECT * FROM produce PIVOT(SUM(sales) FOR quarter IN ('Q1', 'Q2'))",
read={

@@ -910,6 +929,10 @@ class TestDuckDB(Validator):
"postgres": "SELECT 'ThOmAs' ~* 'thomas'",
},
)
self.validate_identity(
"SELECT DATE_ADD(CAST('2020-01-01' AS DATE), INTERVAL 1 DAY)",
"SELECT CAST('2020-01-01' AS DATE) + INTERVAL '1' DAY",
)

def test_array_index(self):
with self.assertLogs(helper_logger) as cm:

@@ -929,15 +952,15 @@ class TestDuckDB(Validator):
)
self.validate_identity(
"""SELECT LIST_VALUE(1)[i]""",
"""SELECT ([1])[i]""",
"""SELECT [1][i]""",
)
self.validate_identity(
"""{'x': LIST_VALUE(1)[i]}""",
"""{'x': ([1])[i]}""",
"""{'x': [1][i]}""",
)
self.validate_identity(
"""SELECT LIST_APPLY(RANGE(1, 4), i -> {'f1': LIST_VALUE(1, 2, 3)[i], 'f2': LIST_VALUE(1, 2, 3)[i]})""",
"""SELECT LIST_APPLY(RANGE(1, 4), i -> {'f1': ([1, 2, 3])[i], 'f2': ([1, 2, 3])[i]})""",
"""SELECT LIST_APPLY(RANGE(1, 4), i -> {'f1': [1, 2, 3][i], 'f2': [1, 2, 3][i]})""",
)

self.assertEqual(

@@ -1,3 +1,4 @@
from sqlglot import exp
from tests.dialects.test_dialect import Validator

@@ -8,16 +9,28 @@ class TestDune(Validator):
self.validate_identity("CAST(x AS INT256)")
self.validate_identity("CAST(x AS UINT256)")

self.validate_all(
"SELECT 0xdeadbeef",
read={
"dune": "SELECT X'deadbeef'",
"postgres": "SELECT x'deadbeef'",
"trino": "SELECT X'deadbeef'",
},
write={
"dune": "SELECT 0xdeadbeef",
"postgres": "SELECT x'deadbeef'",
"trino": "SELECT X'deadbeef'",
},
)
for hex_literal in (
"deadbeef",
"deadbeefdead",
"deadbeefdeadbeef",
"deadbeefdeadbeefde",
"deadbeefdeadbeefdead",
"deadbeefdeadbeefdeadbeef",
"deadbeefdeadbeefdeadbeefdeadbeef",
):
with self.subTest(f"Transpiling hex literal {hex_literal}"):
self.parse_one(f"0x{hex_literal}").assert_is(exp.HexString)

self.validate_all(
f"SELECT 0x{hex_literal}",
read={
"dune": f"SELECT X'{hex_literal}'",
"postgres": f"SELECT x'{hex_literal}'",
"trino": f"SELECT X'{hex_literal}'",
},
write={
"dune": f"SELECT 0x{hex_literal}",
"postgres": f"SELECT x'{hex_literal}'",
"trino": f"SELECT x'{hex_literal}'",
},
)

@@ -14,6 +14,7 @@ class TestMySQL(Validator):
self.validate_identity(f"CREATE TABLE t (id {t} UNSIGNED)")
self.validate_identity(f"CREATE TABLE t (id {t}(10) UNSIGNED)")

self.validate_identity("CREATE TABLE bar (abacate DOUBLE(10, 2) UNSIGNED)")
self.validate_identity("CREATE TABLE t (id DECIMAL(20, 4) UNSIGNED)")
self.validate_identity("CREATE TABLE foo (a BIGINT, UNIQUE (b) USING BTREE)")
self.validate_identity("CREATE TABLE foo (id BIGINT)")

@@ -149,6 +150,10 @@ class TestMySQL(Validator):
"sqlite": "CREATE TABLE x (id INTEGER NOT NULL AUTOINCREMENT PRIMARY KEY)",
},
)
self.validate_identity("ALTER TABLE t ALTER INDEX i INVISIBLE")
self.validate_identity("ALTER TABLE t ALTER INDEX i VISIBLE")
self.validate_identity("ALTER TABLE t ALTER COLUMN c SET INVISIBLE")
self.validate_identity("ALTER TABLE t ALTER COLUMN c SET VISIBLE")

def test_identity(self):
self.validate_identity("SELECT HIGH_PRIORITY STRAIGHT_JOIN SQL_CALC_FOUND_ROWS * FROM t")

@@ -385,16 +390,16 @@ class TestMySQL(Validator):

def test_hexadecimal_literal(self):
write_CC = {
"bigquery": "SELECT 0xCC",
"clickhouse": "SELECT 0xCC",
"bigquery": "SELECT FROM_HEX('CC')",
"clickhouse": UnsupportedError,
"databricks": "SELECT X'CC'",
"drill": "SELECT 204",
"duckdb": "SELECT 204",
"duckdb": "SELECT FROM_HEX('CC')",
"hive": "SELECT 204",
"mysql": "SELECT x'CC'",
"oracle": "SELECT 204",
"postgres": "SELECT x'CC'",
"presto": "SELECT 204",
"presto": "SELECT x'CC'",
"redshift": "SELECT 204",
"snowflake": "SELECT x'CC'",
"spark": "SELECT X'CC'",

@@ -402,20 +407,20 @@ class TestMySQL(Validator):
"starrocks": "SELECT x'CC'",
"tableau": "SELECT 204",
"teradata": "SELECT X'CC'",
"trino": "SELECT X'CC'",
"trino": "SELECT x'CC'",
"tsql": "SELECT 0xCC",
}
write_CC_with_leading_zeros = {
"bigquery": "SELECT 0x0000CC",
"clickhouse": "SELECT 0x0000CC",
"bigquery": "SELECT FROM_HEX('0000CC')",
"clickhouse": UnsupportedError,
"databricks": "SELECT X'0000CC'",
"drill": "SELECT 204",
"duckdb": "SELECT 204",
"duckdb": "SELECT FROM_HEX('0000CC')",
"hive": "SELECT 204",
"mysql": "SELECT x'0000CC'",
"oracle": "SELECT 204",
"postgres": "SELECT x'0000CC'",
"presto": "SELECT 204",
"presto": "SELECT x'0000CC'",
"redshift": "SELECT 204",
"snowflake": "SELECT x'0000CC'",
"spark": "SELECT X'0000CC'",

@@ -423,7 +428,7 @@ class TestMySQL(Validator):
"starrocks": "SELECT x'0000CC'",
"tableau": "SELECT 204",
"teradata": "SELECT X'0000CC'",
"trino": "SELECT X'0000CC'",
"trino": "SELECT x'0000CC'",
"tsql": "SELECT 0x0000CC",
}

@@ -728,6 +733,13 @@ class TestMySQL(Validator):
},
)

self.validate_all(
"CURDATE()",
write={
"mysql": "CURRENT_DATE",
"postgres": "CURRENT_DATE",
},
)
self.validate_all(
"SELECT CONCAT('11', '22')",
read={

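The reshuffled hex-literal expectations above boil down to a change in how MySQL-style x'..' literals are rendered for other engines. A minimal sketch (not part of the diff) of the same round trip through the public API, assuming sqlglot 26.8.0:

import sqlglot

# MySQL keeps the hex string as x'CC'; per the updated write_CC expectations above,
# DuckDB and BigQuery now render it via FROM_HEX instead of an integer literal.
print(sqlglot.transpile("SELECT x'CC'", read="mysql", write="duckdb")[0])    # SELECT FROM_HEX('CC')
print(sqlglot.transpile("SELECT x'CC'", read="mysql", write="bigquery")[0])  # SELECT FROM_HEX('CC')
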
@@ -117,6 +117,13 @@ class TestOracle(Validator):
"SELECT * FROM t START WITH col CONNECT BY NOCYCLE PRIOR col1 = col2"
)

self.validate_all(
"SELECT TRIM('|' FROM '||Hello ||| world||')",
write={
"clickhouse": "SELECT TRIM(BOTH '|' FROM '||Hello ||| world||')",
"oracle": "SELECT TRIM('|' FROM '||Hello ||| world||')",
},
)
self.validate_all(
"SELECT department_id, department_name INTO v_department_id, v_department_name FROM departments FETCH FIRST 1 ROWS ONLY",
write={

@@ -1358,3 +1358,13 @@ CROSS JOIN JSON_ARRAY_ELEMENTS(CAST(JSON_EXTRACT_PATH(tbox, 'boxes') AS JSON)) A
self.validate_identity("ANALYZE TBL(col1, col2)")
self.validate_identity("ANALYZE VERBOSE SKIP_LOCKED TBL(col1, col2)")
self.validate_identity("ANALYZE BUFFER_USAGE_LIMIT 1337 TBL")

def test_recursive_cte(self):
for kind in ("BREADTH", "DEPTH"):
self.validate_identity(
f"WITH RECURSIVE search_tree(id, link, data) AS (SELECT t.id, t.link, t.data FROM tree AS t UNION ALL SELECT t.id, t.link, t.data FROM tree AS t, search_tree AS st WHERE t.id = st.link) SEARCH {kind} FIRST BY id SET ordercol SELECT * FROM search_tree ORDER BY ordercol"
)

self.validate_identity(
"WITH RECURSIVE search_graph(id, link, data, depth) AS (SELECT g.id, g.link, g.data, 1 FROM graph AS g UNION ALL SELECT g.id, g.link, g.data, sg.depth + 1 FROM graph AS g, search_graph AS sg WHERE g.id = sg.link) CYCLE id SET is_cycle USING path SELECT * FROM search_graph"
)

@@ -1427,18 +1427,10 @@ class TestSnowflake(Validator):
"CREATE TABLE t (id INT TAG (key1='value_1', key2='value_2'))",
)

self.validate_identity("USE SECONDARY ROLES ALL")
self.validate_identity("USE SECONDARY ROLES NONE")
self.validate_identity("USE SECONDARY ROLES a, b, c")
self.validate_identity("CREATE SECURE VIEW table1 AS (SELECT a FROM table2)")
self.validate_identity(
"""create external table et2(
col1 date as (parse_json(metadata$external_table_partition):COL1::date),
col2 varchar as (parse_json(metadata$external_table_partition):COL2::varchar),
col3 number as (parse_json(metadata$external_table_partition):COL3::number))
partition by (col1,col2,col3)
location=@s2/logs/
partition_type = user_specified
file_format = (type = parquet)""",
"CREATE EXTERNAL TABLE et2 (col1 DATE AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL1') AS DATE)), col2 VARCHAR AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL2') AS VARCHAR)), col3 DECIMAL(38, 0) AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL3') AS DECIMAL(38, 0)))) LOCATION @s2/logs/ PARTITION BY (col1, col2, col3) partition_type=user_specified file_format=(type = parquet)",
)
self.validate_identity("CREATE OR REPLACE VIEW foo (uid) COPY GRANTS AS (SELECT 1)")
self.validate_identity("CREATE TABLE geospatial_table (id INT, g GEOGRAPHY)")
self.validate_identity("CREATE MATERIALIZED VIEW a COMMENT='...' AS SELECT 1 FROM x")

@@ -1504,6 +1496,17 @@ class TestSnowflake(Validator):
"CREATE SEQUENCE seq1 WITH START=1 INCREMENT=1 ORDER",
"CREATE SEQUENCE seq1 START=1 INCREMENT=1 ORDER",
)
self.validate_identity(
"""create external table et2(
col1 date as (parse_json(metadata$external_table_partition):COL1::date),
col2 varchar as (parse_json(metadata$external_table_partition):COL2::varchar),
col3 number as (parse_json(metadata$external_table_partition):COL3::number))
partition by (col1,col2,col3)
location=@s2/logs/
partition_type = user_specified
file_format = (type = parquet)""",
"CREATE EXTERNAL TABLE et2 (col1 DATE AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL1') AS DATE)), col2 VARCHAR AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL2') AS VARCHAR)), col3 DECIMAL(38, 0) AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL3') AS DECIMAL(38, 0)))) LOCATION @s2/logs/ PARTITION BY (col1, col2, col3) partition_type=user_specified file_format=(type = parquet)",
)

self.validate_all(
"CREATE TABLE orders_clone CLONE orders",

@@ -2062,6 +2065,40 @@ MATCH_RECOGNIZE (
self.validate_identity("SHOW TERSE USERS")
self.validate_identity("SHOW USERS LIKE '_foo%' STARTS WITH 'bar' LIMIT 5 FROM 'baz'")

def test_show_databases(self):
self.validate_identity("SHOW TERSE DATABASES")
self.validate_identity(
"SHOW TERSE DATABASES HISTORY LIKE 'foo' STARTS WITH 'bla' LIMIT 5 FROM 'bob' WITH PRIVILEGES USAGE, MODIFY"
)

ast = parse_one("SHOW DATABASES IN ACCOUNT", read="snowflake")
self.assertEqual(ast.this, "DATABASES")
self.assertEqual(ast.args.get("scope_kind"), "ACCOUNT")

def test_show_functions(self):
self.validate_identity("SHOW FUNCTIONS")
self.validate_identity("SHOW FUNCTIONS LIKE 'foo' IN CLASS bla")

ast = parse_one("SHOW FUNCTIONS IN ACCOUNT", read="snowflake")
self.assertEqual(ast.this, "FUNCTIONS")
self.assertEqual(ast.args.get("scope_kind"), "ACCOUNT")

def test_show_procedures(self):
self.validate_identity("SHOW PROCEDURES")
self.validate_identity("SHOW PROCEDURES LIKE 'foo' IN APPLICATION app")
self.validate_identity("SHOW PROCEDURES LIKE 'foo' IN APPLICATION PACKAGE pkg")

ast = parse_one("SHOW PROCEDURES IN ACCOUNT", read="snowflake")
self.assertEqual(ast.this, "PROCEDURES")
self.assertEqual(ast.args.get("scope_kind"), "ACCOUNT")

def test_show_warehouses(self):
self.validate_identity("SHOW WAREHOUSES")
self.validate_identity("SHOW WAREHOUSES LIKE 'foo' WITH PRIVILEGES USAGE, MODIFY")

ast = parse_one("SHOW WAREHOUSES", read="snowflake")
self.assertEqual(ast.this, "WAREHOUSES")

def test_show_schemas(self):
self.validate_identity(
"show terse schemas in database db1 starts with 'a' limit 10 from 'b'",

@@ -316,7 +316,8 @@ TBLPROPERTIES (
write={
"databricks": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
"spark": "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
"duckdb": "SELECT ([1, 2, 3])[2]",
"duckdb": "SELECT [1, 2, 3][2]",
"duckdb, version=1.1.0": "SELECT ([1, 2, 3])[2]",
"presto": "SELECT ELEMENT_AT(ARRAY[1, 2, 3], 2)",
},
)

@@ -356,7 +357,8 @@ TBLPROPERTIES (
},
write={
"databricks": "SELECT TRY_ELEMENT_AT(MAP(1, 'a', 2, 'b'), 2)",
"duckdb": "SELECT (MAP([1, 2], ['a', 'b'])[2])[1]",
"duckdb": "SELECT MAP([1, 2], ['a', 'b'])[2]",
"duckdb, version=1.1.0": "SELECT (MAP([1, 2], ['a', 'b'])[2])[1]",
"spark": "SELECT TRY_ELEMENT_AT(MAP(1, 'a', 2, 'b'), 2)",
},
)

@@ -108,6 +108,7 @@ class TestSQLite(Validator):
"SELECT * FROM station WHERE city IS NOT ''",
"SELECT * FROM station WHERE NOT city IS ''",
)
self.validate_identity("SELECT JSON_OBJECT('col1', 1, 'col2', '1')")

def test_strftime(self):
self.validate_identity("SELECT STRFTIME('%Y/%m/%d', 'now')")

@@ -5,6 +5,7 @@ class TestTrino(Validator):
dialect = "trino"

def test_trino(self):
self.validate_identity("JSON_QUERY(m.properties, 'lax $.area' OMIT QUOTES NULL ON ERROR)")
self.validate_identity("JSON_EXTRACT(content, json_path)")
self.validate_identity("JSON_QUERY(content, 'lax $.HY.*')")
self.validate_identity("JSON_QUERY(content, 'strict $.HY.*' WITH WRAPPER)")

@@ -1705,6 +1705,8 @@ WHERE
"spark": "SELECT * FROM A LIMIT 3",
},
)
self.validate_identity("SELECT TOP 10 PERCENT")
self.validate_identity("SELECT TOP 10 PERCENT WITH TIES")

def test_format(self):
self.validate_identity("SELECT FORMAT(foo, 'dddd', 'de-CH')")

@@ -314,4 +314,11 @@ STRING;

# dialect: bigquery
STRING(timestamp_expr, timezone);
STRING;
STRING;

--------------------------------------
-- Snowflake
--------------------------------------

LEAST(x::DECIMAL(18, 2));
DECIMAL(18, 2);

tests/fixtures/optimizer/qualify_columns.sql (vendored, 18 changes)
@@ -259,6 +259,20 @@ WITH T1 AS (SELECT 1 AS C1, 1 AS C2, 'Y' AS TOP_PARENT_INDICATOR, 1 AS ID FROM D
SELECT * FROM ROWS FROM (GENERATE_SERIES(1, 3), GENERATE_SERIES(10, 12)) AS t(a, b);
SELECT t.a AS a, t.b AS b FROM ROWS FROM (GENERATE_SERIES(1, 3), GENERATE_SERIES(10, 12)) AS t(a, b);

# execute: false
# dialect: clickhouse
SELECT generate_series FROM generate_series(0, 10) AS g;
SELECT g.generate_series AS generate_series FROM generate_series(0, 10) AS g(generate_series);

# execute: false
# dialect: snowflake
SELECT * FROM quarterly_sales PIVOT(SUM(amount) FOR quarter IN (ANY ORDER BY quarter)) ORDER BY empid;
SELECT * FROM QUARTERLY_SALES AS QUARTERLY_SALES PIVOT(SUM(QUARTERLY_SALES.AMOUNT) FOR QUARTERLY_SALES.QUARTER IN (ANY ORDER BY QUARTER)) AS _Q_0 ORDER BY _Q_0.EMPID;

# execute: false
SELECT PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x) AS x FROM t;
SELECT PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY t.x) AS x FROM t AS t;

--------------------------------------
-- Derived tables
--------------------------------------

@@ -752,6 +766,10 @@ WITH RECURSIVE t(c) AS (SELECT 1 AS c UNION ALL SELECT t.c + 1 AS c FROM t AS t
WITH RECURSIVE t AS (SELECT 1 AS c UNION ALL SELECT c + 1 AS c FROM t WHERE c <= 10) SELECT c FROM t;
WITH RECURSIVE t AS (SELECT 1 AS c UNION ALL SELECT t.c + 1 AS c FROM t AS t WHERE t.c <= 10) SELECT t.c AS c FROM t AS t;

# title: expand DISTINCT ON ordinals / projection names
SELECT DISTINCT ON (new_col, b + 1, 1) t1.a AS new_col FROM x AS t1 ORDER BY new_col;
SELECT DISTINCT ON (new_col, t1.b + 1, new_col) t1.a AS new_col FROM x AS t1 ORDER BY new_col;

--------------------------------------
-- Wrapped tables / join constructs
--------------------------------------

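The new clickhouse fixture case above (generate_series resolved through the alias's column list) can be reproduced with the optimizer's qualify entrypoint. A minimal sketch (not part of the diff), assuming sqlglot 26.8.0; exact identifier quoting depends on the qualify options:

from sqlglot import parse_one
from sqlglot.optimizer.qualify import qualify

expr = parse_one("SELECT generate_series FROM generate_series(0, 10) AS g", read="clickhouse")
# Qualification names the table alias's output column, as in the fixture expectation above:
# SELECT g.generate_series AS generate_series FROM generate_series(0, 10) AS g(generate_series)
print(qualify(expr, dialect="clickhouse").sql(dialect="clickhouse"))
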
@@ -240,10 +240,27 @@ class TestDiff(unittest.TestCase):
],
)

with self.assertRaises(ValueError):
self._validate_delta_only(
diff_delta_only(
expr_src, expr_tgt, matchings=[(expr_src, expr_tgt), (expr_src, expr_tgt)]
)
),
[
Insert(expression=exp.Literal.number(2)),
Insert(expression=exp.Literal.number(3)),
Insert(expression=exp.Literal.number(4)),
],
)

expr_tgt.selects[0].replace(expr_src.selects[0])

self._validate_delta_only(
diff_delta_only(expr_src, expr_tgt, matchings=[(expr_src, expr_tgt)]),
[
Insert(expression=exp.Literal.number(2)),
Insert(expression=exp.Literal.number(3)),
Insert(expression=exp.Literal.number(4)),
],
)

def test_identifier(self):
expr_src = parse_one("SELECT a FROM tbl")

@@ -216,6 +216,28 @@ class TestOptimizer(unittest.TestCase):
)

def test_qualify_tables(self):
self.assertEqual(
optimizer.qualify_tables.qualify_tables(
parse_one(
"WITH cte AS (SELECT * FROM t) SELECT * FROM cte PIVOT(SUM(c) FOR v IN ('x', 'y'))"
),
db="db",
catalog="catalog",
).sql(),
"WITH cte AS (SELECT * FROM catalog.db.t AS t) SELECT * FROM cte AS cte PIVOT(SUM(c) FOR v IN ('x', 'y')) AS _q_0",
)

self.assertEqual(
optimizer.qualify_tables.qualify_tables(
parse_one(
"WITH cte AS (SELECT * FROM t) SELECT * FROM cte PIVOT(SUM(c) FOR v IN ('x', 'y')) AS pivot_alias"
),
db="db",
catalog="catalog",
).sql(),
"WITH cte AS (SELECT * FROM catalog.db.t AS t) SELECT * FROM cte AS cte PIVOT(SUM(c) FOR v IN ('x', 'y')) AS pivot_alias",
)

self.assertEqual(
optimizer.qualify_tables.qualify_tables(
parse_one("select a from b"), catalog="catalog"

@@ -1,6 +1,6 @@
import unittest

from sqlglot import parse_one
from sqlglot import parse_one, expressions as exp
from sqlglot.transforms import (
eliminate_distinct_on,
eliminate_join_marks,

@@ -147,7 +147,7 @@ class TestTransforms(unittest.TestCase):
self.validate(
eliminate_qualify,
"SELECT x AS z FROM y QUALIFY ROW_NUMBER() OVER (PARTITION BY z)",
"SELECT z FROM (SELECT x AS z, ROW_NUMBER() OVER (PARTITION BY x) AS _w, x FROM y) AS _t WHERE _w",
"SELECT z FROM (SELECT x AS z, ROW_NUMBER() OVER (PARTITION BY x) AS _w FROM y) AS _t WHERE _w",
)
self.validate(
eliminate_qualify,

@@ -162,7 +162,12 @@ class TestTransforms(unittest.TestCase):
self.validate(
eliminate_qualify,
"SELECT y.x AS x, y.t AS z FROM y QUALIFY ROW_NUMBER() OVER (PARTITION BY x ORDER BY x DESC, z)",
"SELECT x, z FROM (SELECT y.x AS x, y.t AS z, ROW_NUMBER() OVER (PARTITION BY y.x ORDER BY y.x DESC, y.t) AS _w, y.t FROM y) AS _t WHERE _w",
"SELECT x, z FROM (SELECT y.x AS x, y.t AS z, ROW_NUMBER() OVER (PARTITION BY y.x ORDER BY y.x DESC, y.t) AS _w FROM y) AS _t WHERE _w",
)
self.validate(
eliminate_qualify,
"select max(col) over (partition by col_id) as col, from some_table qualify row_number() over (partition by col_id order by col asc)=1",
"SELECT col FROM (SELECT MAX(col) OVER (PARTITION BY col_id) AS col, ROW_NUMBER() OVER (PARTITION BY col_id ORDER BY MAX(col) OVER (PARTITION BY col_id) ASC) AS _w, col_id FROM some_table) AS _t WHERE _w = 1",
)

def test_remove_precision_parameterized_types(self):

@@ -251,3 +256,12 @@ class TestTransforms(unittest.TestCase):
f"SELECT table1.id, table2.cloumn1, table3.id FROM table1 LEFT JOIN table2 ON table1.id = table2.id LEFT JOIN (SELECT tableInner1.id FROM tableInner1 LEFT JOIN tableInner2 ON tableInner1.id = tableInner2.id) {alias}table3 ON table1.id = table3.id",
dialect,
)

# if multiple conditions, we check that after transformations the tree remains consistent
s = "select a.id from a, b where a.id = b.id (+) AND b.d (+) = const"
tree = eliminate_join_marks(parse_one(s, dialect=dialect))
assert all(type(t.parent_select) is exp.Select for t in tree.find_all(exp.Table))
assert (
tree.sql(dialect=dialect)
== "SELECT a.id FROM a LEFT JOIN b ON a.id = b.id AND b.d = const"
)

@@ -23,6 +23,11 @@ class TestTranspile(unittest.TestCase):

def test_weird_chars(self):
self.assertEqual(transpile("0Êß")[0], "0 AS Êß")
self.assertEqual(
# Ideographic space after SELECT (\u3000)
transpile("SELECT * FROM t WHERE c = 1")[0],
"SELECT * FROM t WHERE c = 1",
)

def test_alias(self):
self.assertEqual(transpile("SELECT SUM(y) KEEP")[0], "SELECT SUM(y) AS KEEP")