Adding upstream version 26.14.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent 539faf8d40
commit dfe1cec38a

84 changed files with 63872 additions and 61909 deletions
@@ -154,6 +154,12 @@ class TestAthena(Validator):
             write_sql='CREATE TABLE "foo" AS WITH "foo" AS (SELECT "a", "b" FROM "bar") SELECT * FROM "foo"',
         )
 
+        # CTAS with Union should still hit the Trino engine and not Hive
+        self.validate_identity(
+            'CREATE TABLE `foo` AS WITH `foo` AS (SELECT "a", `b` FROM "bar") SELECT * FROM "foo" UNION SELECT * FROM "foo"',
+            write_sql='CREATE TABLE "foo" AS WITH "foo" AS (SELECT "a", "b" FROM "bar") SELECT * FROM "foo" UNION SELECT * FROM "foo"',
+        )
+
         self.validate_identity("DESCRIBE foo.bar", write_sql="DESCRIBE `foo`.`bar`", identify=True)
 
     def test_dml_quoting(self):
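To see the Athena behavior these tests pin down, a minimal sketch (assuming sqlglot 26.14.0 is installed; the expected output is the write_sql value from the test above):

    import sqlglot

    # CTAS runs on Athena's Trino engine, so identifiers are double-quoted
    # on output even though the input uses Hive-style backticks.
    sql = 'CREATE TABLE `foo` AS WITH `foo` AS (SELECT "a", `b` FROM "bar") SELECT * FROM "foo"'
    print(sqlglot.transpile(sql, read="athena", write="athena")[0])
    # CREATE TABLE "foo" AS WITH "foo" AS (SELECT "a", "b" FROM "bar") SELECT * FROM "foo"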
@@ -1404,11 +1404,13 @@ LANGUAGE js AS
         )
         self.validate_all(
             "TO_JSON_STRING(x)",
-            read={"bigquery": "TO_JSON_STRING(x)"},
+            read={
+                "bigquery": "TO_JSON_STRING(x)",
+            },
             write={
                 "bigquery": "TO_JSON_STRING(x)",
                 "duckdb": "CAST(TO_JSON(x) AS TEXT)",
-                "presto": "JSON_FORMAT(x)",
+                "presto": "JSON_FORMAT(CAST(x AS JSON))",
                 "spark": "TO_JSON(x)",
             },
         )
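The Presto mapping change can be checked directly; a sketch (the expected string is the new value asserted above):

    import sqlglot

    # TO_JSON_STRING now casts its argument to JSON first, since Presto's
    # JSON_FORMAT expects a JSON-typed value.
    print(sqlglot.transpile("TO_JSON_STRING(x)", read="bigquery", write="presto")[0])
    # JSON_FORMAT(CAST(x AS JSON))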
@@ -1486,7 +1488,7 @@ WHERE
                 "snowflake": "SELECT LENGTH(foo)",
             },
             write={
-                "duckdb": "SELECT CASE TYPEOF(foo) WHEN 'VARCHAR' THEN LENGTH(CAST(foo AS TEXT)) WHEN 'BLOB' THEN OCTET_LENGTH(CAST(foo AS BLOB)) END",
+                "duckdb": "SELECT CASE TYPEOF(foo) WHEN 'BLOB' THEN OCTET_LENGTH(CAST(foo AS BLOB)) ELSE LENGTH(CAST(foo AS TEXT)) END",
                 "snowflake": "SELECT LENGTH(foo)",
                 "": "SELECT LENGTH(foo)",
             },
@@ -2382,3 +2384,43 @@ OPTIONS (
 
         for select in annotated.selects:
             self.assertEqual(select.type.sql("bigquery"), "TIMESTAMP")
+
+    def test_set_operations(self):
+        self.validate_identity("SELECT 1 AS foo INNER UNION ALL SELECT 3 AS foo, 4 AS bar")
+
+        for side in ("", " LEFT", " FULL"):
+            for kind in ("", " OUTER"):
+                for name in (
+                    "",
+                    " BY NAME",
+                    " BY NAME ON (foo, bar)",
+                ):
+                    with self.subTest(f"Testing {side} {kind} {name} in test_set_operations"):
+                        self.validate_identity(
+                            f"SELECT 1 AS foo{side}{kind} UNION ALL{name} SELECT 3 AS foo, 4 AS bar",
+                        )
+
+        self.validate_identity(
+            "SELECT 1 AS x UNION ALL CORRESPONDING SELECT 2 AS x",
+            "SELECT 1 AS x INNER UNION ALL BY NAME SELECT 2 AS x",
+        )
+
+        self.validate_identity(
+            "SELECT 1 AS x UNION ALL CORRESPONDING BY (foo, bar) SELECT 2 AS x",
+            "SELECT 1 AS x INNER UNION ALL BY NAME ON (foo, bar) SELECT 2 AS x",
+        )
+
+        self.validate_identity(
+            "SELECT 1 AS x LEFT UNION ALL CORRESPONDING SELECT 2 AS x",
+            "SELECT 1 AS x LEFT UNION ALL BY NAME SELECT 2 AS x",
+        )
+
+        self.validate_identity(
+            "SELECT 1 AS x UNION ALL STRICT CORRESPONDING SELECT 2 AS x",
+            "SELECT 1 AS x UNION ALL BY NAME SELECT 2 AS x",
+        )
+
+        self.validate_identity(
+            "SELECT 1 AS x UNION ALL STRICT CORRESPONDING BY (foo, bar) SELECT 2 AS x",
+            "SELECT 1 AS x UNION ALL BY NAME ON (foo, bar) SELECT 2 AS x",
+        )
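The canonicalization of CORRESPONDING into BigQuery's BY NAME syntax is easy to reproduce (sketch; the expected form is the second argument of validate_identity above):

    import sqlglot

    # CORRESPONDING is parsed and re-emitted as INNER ... BY NAME.
    sql = "SELECT 1 AS x UNION ALL CORRESPONDING SELECT 2 AS x"
    print(sqlglot.parse_one(sql, read="bigquery").sql(dialect="bigquery"))
    # SELECT 1 AS x INNER UNION ALL BY NAME SELECT 2 AS x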
@@ -33,6 +33,7 @@ class TestClickhouse(Validator):
         self.assertEqual(expr.sql(dialect="clickhouse"), "COUNT(x)")
         self.assertIsNone(expr._meta)
 
+        self.validate_identity('SELECT DISTINCT ON ("id") * FROM t')
         self.validate_identity("SELECT 1 OR (1 = 2)")
         self.validate_identity("SELECT 1 AND (1 = 2)")
         self.validate_identity("SELECT json.a.:Int64")
@@ -105,8 +106,10 @@ class TestClickhouse(Validator):
         self.validate_identity("SELECT * FROM table LIMIT 1 BY a, b")
         self.validate_identity("SELECT * FROM table LIMIT 2 OFFSET 1 BY a, b")
         self.validate_identity("TRUNCATE TABLE t1 ON CLUSTER test_cluster")
+        self.validate_identity("TRUNCATE TABLE t1 ON CLUSTER '{cluster}'")
         self.validate_identity("TRUNCATE DATABASE db")
         self.validate_identity("TRUNCATE DATABASE db ON CLUSTER test_cluster")
+        self.validate_identity("TRUNCATE DATABASE db ON CLUSTER '{cluster}'")
         self.validate_identity(
             "SELECT DATE_BIN(toDateTime('2023-01-01 14:45:00'), INTERVAL '1' MINUTE, toDateTime('2023-01-01 14:35:30'), 'UTC')",
         )
@@ -155,12 +158,21 @@ class TestClickhouse(Validator):
         self.validate_identity(
             "CREATE TABLE test ON CLUSTER default (id UInt8) ENGINE=AggregatingMergeTree() ORDER BY tuple()"
         )
+        self.validate_identity(
+            "CREATE TABLE test ON CLUSTER '{cluster}' (id UInt8) ENGINE=AggregatingMergeTree() ORDER BY tuple()"
+        )
         self.validate_identity(
             "CREATE MATERIALIZED VIEW test_view ON CLUSTER cl1 (id UInt8) ENGINE=AggregatingMergeTree() ORDER BY tuple() AS SELECT * FROM test_data"
         )
+        self.validate_identity(
+            "CREATE MATERIALIZED VIEW test_view ON CLUSTER '{cluster}' (id UInt8) ENGINE=AggregatingMergeTree() ORDER BY tuple() AS SELECT * FROM test_data"
+        )
         self.validate_identity(
             "CREATE MATERIALIZED VIEW test_view ON CLUSTER cl1 TO table1 AS SELECT * FROM test_data"
         )
+        self.validate_identity(
+            "CREATE MATERIALIZED VIEW test_view ON CLUSTER '{cluster}' TO table1 AS SELECT * FROM test_data"
+        )
         self.validate_identity(
             "CREATE MATERIALIZED VIEW test_view TO db.table1 (id UInt8) AS SELECT * FROM test_data"
         )
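Since validate_identity asserts an unchanged round trip, the new '{cluster}' macro support can be confirmed with a sketch like:

    import sqlglot

    # The ON CLUSTER argument may now be a quoted macro such as '{cluster}'.
    sql = "TRUNCATE TABLE t1 ON CLUSTER '{cluster}'"
    print(sqlglot.transpile(sql, read="clickhouse", write="clickhouse")[0])
    # TRUNCATE TABLE t1 ON CLUSTER '{cluster}'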
@@ -184,7 +196,7 @@ class TestClickhouse(Validator):
         )
         self.validate_identity(
             "INSERT INTO tab VALUES ({'key1': 1, 'key2': 10}), ({'key1': 2, 'key2': 20}), ({'key1': 3, 'key2': 30})",
-            "INSERT INTO tab VALUES (map('key1', 1, 'key2', 10)), (map('key1', 2, 'key2', 20)), (map('key1', 3, 'key2', 30))",
+            "INSERT INTO tab VALUES ((map('key1', 1, 'key2', 10))), ((map('key1', 2, 'key2', 20))), ((map('key1', 3, 'key2', 30)))",
         )
         self.validate_identity(
             "SELECT (toUInt8('1') + toUInt8('2')) IS NOT NULL",
@@ -507,11 +519,12 @@ class TestClickhouse(Validator):
             "INSERT INTO FUNCTION s3('url', 'CSV', 'name String, value UInt32', 'gzip') SELECT name, value FROM existing_table"
         )
         self.validate_identity(
-            "INSERT INTO FUNCTION remote('localhost', default.simple_table) VALUES (100, 'inserted via remote()')"
+            "INSERT INTO FUNCTION remote('localhost', default.simple_table) VALUES (100, 'inserted via remote()')",
+            "INSERT INTO FUNCTION remote('localhost', default.simple_table) VALUES ((100), ('inserted via remote()'))",
         )
         self.validate_identity(
             """INSERT INTO TABLE FUNCTION hdfs('hdfs://hdfs1:9000/test', 'TSV', 'name String, column2 UInt32, column3 UInt32') VALUES ('test', 1, 2)""",
-            """INSERT INTO FUNCTION hdfs('hdfs://hdfs1:9000/test', 'TSV', 'name String, column2 UInt32, column3 UInt32') VALUES ('test', 1, 2)""",
+            """INSERT INTO FUNCTION hdfs('hdfs://hdfs1:9000/test', 'TSV', 'name String, column2 UInt32, column3 UInt32') VALUES (('test'), (1), (2))""",
         )
 
         self.validate_identity("SELECT 1 FORMAT TabSeparated")
@@ -546,22 +559,23 @@ class TestClickhouse(Validator):
         )
         self.validate_identity("ALTER TABLE visits REPLACE PARTITION ID '201901' FROM visits_tmp")
         self.validate_identity("ALTER TABLE visits ON CLUSTER test_cluster DROP COLUMN col1")
+        self.validate_identity("ALTER TABLE visits ON CLUSTER '{cluster}' DROP COLUMN col1")
         self.validate_identity("DELETE FROM tbl ON CLUSTER test_cluster WHERE date = '2019-01-01'")
+        self.validate_identity("DELETE FROM tbl ON CLUSTER '{cluster}' WHERE date = '2019-01-01'")
 
         self.assertIsInstance(
             parse_one("Tuple(select Int64)", into=exp.DataType, read="clickhouse"), exp.DataType
         )
 
-        self.validate_identity("INSERT INTO t (col1, col2) VALUES ('abcd', 1234)")
+        self.validate_identity(
+            "INSERT INTO t (col1, col2) VALUES ('abcd', 1234)",
+            "INSERT INTO t (col1, col2) VALUES (('abcd'), (1234))",
+        )
         self.validate_all(
             "INSERT INTO t (col1, col2) VALUES ('abcd', 1234)",
             read={
                 # looks like values table function, but should be parsed as VALUES block
                 "clickhouse": "INSERT INTO t (col1, col2) values('abcd', 1234)"
             },
             write={
-                "clickhouse": "INSERT INTO t (col1, col2) VALUES ('abcd', 1234)",
-                "postgres": "INSERT INTO t (col1, col2) VALUES ('abcd', 1234)",
+                "clickhouse": "INSERT INTO t (col1, col2) VALUES (('abcd'), (1234))",
+                "postgres": "INSERT INTO t (col1, col2) VALUES (('abcd'), (1234))",
             },
         )
         self.validate_identity("SELECT TRIM(TRAILING ')' FROM '( Hello, world! )')")
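The new canonical form wraps each item of a ClickHouse VALUES row in its own tuple; a sketch (expected output copied from the test above):

    import sqlglot

    # Each scalar in the VALUES block is emitted as a one-element tuple.
    sql = "INSERT INTO t (col1, col2) VALUES ('abcd', 1234)"
    print(sqlglot.transpile(sql, read="clickhouse", write="clickhouse")[0])
    # INSERT INTO t (col1, col2) VALUES (('abcd'), (1234))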
@@ -591,6 +605,9 @@ class TestClickhouse(Validator):
         self.validate_identity("SELECT arrayConcat([1, 2], [3, 4])")
 
     def test_clickhouse_values(self):
+        ast = self.parse_one("SELECT * FROM VALUES (1, 2, 3)")
+        self.assertEqual(len(list(ast.find_all(exp.Tuple))), 4)
+
         values = exp.select("*").from_(
             exp.values([exp.tuple_(1, 2, 3)], alias="subq", columns=["a", "b", "c"])
         )
@@ -599,10 +616,18 @@ class TestClickhouse(Validator):
             "SELECT * FROM (SELECT 1 AS a, 2 AS b, 3 AS c) AS subq",
         )
 
-        self.validate_identity("INSERT INTO t (col1, col2) VALUES ('abcd', 1234)")
+        self.validate_identity("SELECT * FROM VALUES ((1, 1), (2, 1), (3, 1), (4, 1))")
+        self.validate_identity(
+            "SELECT type, id FROM VALUES ('id Int, type Int', (1, 1), (2, 1), (3, 1), (4, 1))"
+        )
+
+        self.validate_identity(
+            "INSERT INTO t (col1, col2) VALUES ('abcd', 1234)",
+            "INSERT INTO t (col1, col2) VALUES (('abcd'), (1234))",
+        )
         self.validate_identity(
             "INSERT INTO t (col1, col2) FORMAT Values('abcd', 1234)",
-            "INSERT INTO t (col1, col2) VALUES ('abcd', 1234)",
+            "INSERT INTO t (col1, col2) VALUES (('abcd'), (1234))",
         )
 
         self.validate_all(
@@ -1171,6 +1196,7 @@ LIFETIME(MIN 0 MAX 0)""",
         for creatable in ("DATABASE", "TABLE", "VIEW", "DICTIONARY", "FUNCTION"):
             with self.subTest(f"Test DROP {creatable} ON CLUSTER"):
                 self.validate_identity(f"DROP {creatable} test ON CLUSTER test_cluster")
+                self.validate_identity(f"DROP {creatable} test ON CLUSTER '{{cluster}}'")
 
     def test_datetime_funcs(self):
         # Each datetime func has an alias that is roundtripped to the original name e.g. (DATE_SUB, DATESUB) -> DATE_SUB
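The same wrapping is visible in the parsed tree, mirroring the assertion in test_clickhouse_values above (sketch; the count of 4 is the value the test asserts, presumably the row tuple plus one wrapper per value):

    import sqlglot
    from sqlglot import exp

    ast = sqlglot.parse_one("SELECT * FROM VALUES (1, 2, 3)", read="clickhouse")
    print(len(list(ast.find_all(exp.Tuple))))
    # 4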
@@ -68,6 +68,24 @@ class TestDatabricks(Validator):
             "FROM_UTC_TIMESTAMP(CAST(x AS TIMESTAMP), tz)",
         )
 
+        self.validate_all(
+            "SELECT c1:item[1].price",
+            read={
+                "spark": "SELECT GET_JSON_OBJECT(c1, '$.item[1].price')",
+            },
+            write={
+                "databricks": "SELECT c1:item[1].price",
+                "spark": "SELECT GET_JSON_OBJECT(c1, '$.item[1].price')",
+            },
+        )
+
+        self.validate_all(
+            "SELECT GET_JSON_OBJECT(c1, '$.item[1].price')",
+            write={
+                "databricks": "SELECT c1:item[1].price",
+                "spark": "SELECT GET_JSON_OBJECT(c1, '$.item[1].price')",
+            },
+        )
         self.validate_all(
             "CREATE TABLE foo (x INT GENERATED ALWAYS AS (YEAR(y)))",
             write={
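The Databricks/Spark JSON-path mapping can be exercised in either direction; a sketch (expected outputs are those asserted above):

    import sqlglot

    # Spark's GET_JSON_OBJECT becomes Databricks' colon-path extraction syntax.
    sql = "SELECT GET_JSON_OBJECT(c1, '$.item[1].price')"
    print(sqlglot.transpile(sql, read="spark", write="databricks")[0])
    # SELECT c1:item[1].price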
@@ -400,6 +400,9 @@ class TestDuckDB(Validator):
         self.validate_identity(
             "SELECT * FROM (PIVOT Cities ON Year USING SUM(Population) GROUP BY Country) AS pivot_alias"
         )
+        self.validate_identity(
+            "SELECT * FROM cities PIVOT(SUM(population) FOR year IN (2000, 2010, 2020) GROUP BY country)"
+        )
         self.validate_identity(
             # QUALIFY comes after WINDOW
             "SELECT schema_name, function_name, ROW_NUMBER() OVER my_window AS function_rank FROM DUCKDB_FUNCTIONS() WINDOW my_window AS (PARTITION BY schema_name ORDER BY function_name) QUALIFY ROW_NUMBER() OVER my_window < 3"
@@ -1595,3 +1598,7 @@ class TestDuckDB(Validator):
             "SELECT (@-1) + 1",
             "SELECT (ABS(-1)) + 1",
         )
+
+    def test_show_tables(self):
+        self.validate_identity("SHOW TABLES").assert_is(exp.Show)
+        self.validate_identity("SHOW ALL TABLES").assert_is(exp.Show)
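Both DuckDB behaviors covered here are one-liners to verify (sketch; SHOW TABLES parsing into exp.Show is what the new test asserts):

    import sqlglot
    from sqlglot import exp

    # @x is DuckDB shorthand for ABS(x) and is canonicalized on output.
    print(sqlglot.transpile("SELECT (@-1) + 1", read="duckdb", write="duckdb")[0])
    # SELECT (ABS(-1)) + 1

    # SHOW TABLES now parses into an exp.Show node.
    print(isinstance(sqlglot.parse_one("SHOW TABLES", read="duckdb"), exp.Show))
    # True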
@@ -76,6 +76,9 @@ class TestPostgres(Validator):
         self.validate_identity("SELECT CURRENT_SCHEMA")
         self.validate_identity("SELECT CURRENT_USER")
         self.validate_identity("SELECT * FROM ONLY t1")
+        self.validate_identity(
+            "SELECT * FROM test_data, LATERAL JSONB_ARRAY_ELEMENTS(data) WITH ORDINALITY AS elem(value, ordinality)"
+        )
         self.validate_identity(
             "SELECT id, name FROM xml_data AS t, XMLTABLE('/root/user' PASSING t.xml COLUMNS id INT PATH '@id', name TEXT PATH 'name/text()') AS x"
         )
@@ -1021,9 +1021,6 @@ class TestPresto(Validator):
         )
         self.validate_all(
             "JSON_FORMAT(x)",
-            read={
-                "spark": "TO_JSON(x)",
-            },
             write={
                 "bigquery": "TO_JSON_STRING(x)",
                 "duckdb": "CAST(TO_JSON(x) AS TEXT)",
@@ -1,6 +1,6 @@
 from unittest import mock
 
-from sqlglot import UnsupportedError, exp, parse_one
+from sqlglot import UnsupportedError, exp, parse_one, ParseError
 from sqlglot.optimizer.normalize_identifiers import normalize_identifiers
 from sqlglot.optimizer.qualify_columns import quote_identifiers
 from tests.dialects.test_dialect import Validator
@@ -1481,9 +1481,30 @@ class TestSnowflake(Validator):
         self.validate_identity("CREATE TAG cost_center ALLOWED_VALUES 'a', 'b'")
         self.validate_identity("CREATE WAREHOUSE x").this.assert_is(exp.Identifier)
         self.validate_identity("CREATE STREAMLIT x").this.assert_is(exp.Identifier)
+        self.validate_identity(
+            "CREATE TEMPORARY STAGE stage1 FILE_FORMAT=(TYPE=PARQUET)"
+        ).this.assert_is(exp.Table)
+        self.validate_identity(
+            "CREATE STAGE stage1 FILE_FORMAT='format1'",
+            "CREATE STAGE stage1 FILE_FORMAT=(FORMAT_NAME='format1')",
+        )
+        self.validate_identity("CREATE STAGE stage1 FILE_FORMAT=(FORMAT_NAME=stage1.format1)")
+        self.validate_identity("CREATE STAGE stage1 FILE_FORMAT=(FORMAT_NAME='stage1.format1')")
+        self.validate_identity(
+            "CREATE STAGE stage1 FILE_FORMAT=schema1.format1",
+            "CREATE STAGE stage1 FILE_FORMAT=(FORMAT_NAME=schema1.format1)",
+        )
+        with self.assertRaises(ParseError):
+            self.parse_one("CREATE STAGE stage1 FILE_FORMAT=123", dialect="snowflake")
+        self.validate_identity(
+            "CREATE STAGE s1 URL='s3://bucket-123' FILE_FORMAT=(TYPE='JSON') CREDENTIALS=(aws_key_id='test' aws_secret_key='test')"
+        )
+        self.validate_identity(
+            "CREATE OR REPLACE TAG IF NOT EXISTS cost_center COMMENT='cost_center tag'"
+        ).this.assert_is(exp.Identifier)
         self.validate_identity(
             "CREATE TEMPORARY FILE FORMAT fileformat1 TYPE=PARQUET COMPRESSION=auto"
         ).this.assert_is(exp.Table)
         self.validate_identity(
             "CREATE DYNAMIC TABLE product (pre_tax_profit, taxes, after_tax_profit) TARGET_LAG='20 minutes' WAREHOUSE=mywh AS SELECT revenue - cost, (revenue - cost) * tax_rate, (revenue - cost) * (1.0 - tax_rate) FROM staging_table"
         )
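The new CREATE STAGE handling normalizes a bare FILE_FORMAT value into the explicit FORMAT_NAME form; a sketch (expected output is the second validate_identity argument above):

    import sqlglot

    sql = "CREATE STAGE stage1 FILE_FORMAT='format1'"
    print(sqlglot.transpile(sql, read="snowflake", write="snowflake")[0])
    # CREATE STAGE stage1 FILE_FORMAT=(FORMAT_NAME='format1')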
@@ -1499,9 +1520,6 @@ class TestSnowflake(Validator):
         self.validate_identity(
             "CREATE TABLE orders_clone_restore CLONE orders BEFORE (STATEMENT => '8e5d0ca9-005e-44e6-b858-a8f5b37c5726')"
         )
-        self.validate_identity(
-            "CREATE TABLE a (x DATE, y BIGINT) PARTITION BY (x) integration='q' auto_refresh=TRUE file_format=(type = parquet)"
-        )
         self.validate_identity(
             "CREATE SCHEMA mytestschema_clone_restore CLONE testschema BEFORE (TIMESTAMP => TO_TIMESTAMP(40 * 365 * 86400))"
         )
@@ -1544,8 +1562,8 @@ class TestSnowflake(Validator):
             partition by (col1,col2,col3)
             location=@s2/logs/
             partition_type = user_specified
-            file_format = (type = parquet)""",
-            "CREATE EXTERNAL TABLE et2 (col1 DATE AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL1') AS DATE)), col2 VARCHAR AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL2') AS VARCHAR)), col3 DECIMAL(38, 0) AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL3') AS DECIMAL(38, 0)))) LOCATION @s2/logs/ PARTITION BY (col1, col2, col3) partition_type=user_specified file_format=(type = parquet)",
+            file_format = (type = parquet compression = gzip binary_as_text = false)""",
+            "CREATE EXTERNAL TABLE et2 (col1 DATE AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL1') AS DATE)), col2 VARCHAR AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL2') AS VARCHAR)), col3 DECIMAL(38, 0) AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL3') AS DECIMAL(38, 0)))) PARTITION BY (col1, col2, col3) LOCATION=@s2/logs/ partition_type=user_specified FILE_FORMAT=(type=parquet compression=gzip binary_as_text=FALSE)",
         )
 
         self.validate_all(
@@ -2092,6 +2110,15 @@ MATCH_RECOGNIZE (
         self.assertEqual(ast.this, "DATABASES")
         self.assertEqual(ast.args.get("scope_kind"), "ACCOUNT")
 
+    def test_show_file_formats(self):
+        self.validate_identity("SHOW FILE FORMATS")
+        self.validate_identity("SHOW FILE FORMATS LIKE 'foo' IN DATABASE db1")
+        self.validate_identity("SHOW FILE FORMATS LIKE 'foo' IN SCHEMA db1.schema1")
+
+        ast = parse_one("SHOW FILE FORMATS IN ACCOUNT", read="snowflake")
+        self.assertEqual(ast.this, "FILE FORMATS")
+        self.assertEqual(ast.args.get("scope_kind"), "ACCOUNT")
+
     def test_show_functions(self):
         self.validate_identity("SHOW FUNCTIONS")
         self.validate_identity("SHOW FUNCTIONS LIKE 'foo' IN CLASS bla")
@@ -2109,6 +2136,15 @@ MATCH_RECOGNIZE (
         self.assertEqual(ast.this, "PROCEDURES")
         self.assertEqual(ast.args.get("scope_kind"), "ACCOUNT")
 
+    def test_show_stages(self):
+        self.validate_identity("SHOW STAGES")
+        self.validate_identity("SHOW STAGES LIKE 'foo' IN DATABASE db1")
+        self.validate_identity("SHOW STAGES LIKE 'foo' IN SCHEMA db1.schema1")
+
+        ast = parse_one("SHOW STAGES IN ACCOUNT", read="snowflake")
+        self.assertEqual(ast.this, "STAGES")
+        self.assertEqual(ast.args.get("scope_kind"), "ACCOUNT")
+
     def test_show_warehouses(self):
         self.validate_identity("SHOW WAREHOUSES")
         self.validate_identity("SHOW WAREHOUSES LIKE 'foo' WITH PRIVILEGES USAGE, MODIFY")
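The new SHOW parsers expose the statement kind and scope on the AST, as in this sketch mirroring the assertions above:

    import sqlglot

    ast = sqlglot.parse_one("SHOW STAGES IN ACCOUNT", read="snowflake")
    print(ast.this, ast.args.get("scope_kind"))
    # STAGES ACCOUNT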
@@ -245,6 +245,7 @@ TBLPROPERTIES (
             "REFRESH TABLE t",
         )
 
+        self.validate_identity("IF(cond, foo AS bar, bla AS baz)")
         self.validate_identity("any_value(col, true)", "ANY_VALUE(col) IGNORE NULLS")
         self.validate_identity("first(col, true)", "FIRST(col) IGNORE NULLS")
         self.validate_identity("first_value(col, true)", "FIRST_VALUE(col) IGNORE NULLS")
@@ -307,6 +308,15 @@ TBLPROPERTIES (
             "SELECT STR_TO_MAP('a:1,b:2,c:3')",
             "SELECT STR_TO_MAP('a:1,b:2,c:3', ',', ':')",
         )
+
+        self.validate_all(
+            "SELECT TO_JSON(STRUCT('blah' AS x)) AS y",
+            write={
+                "presto": "SELECT JSON_FORMAT(CAST(CAST(ROW('blah') AS ROW(x VARCHAR)) AS JSON)) AS y",
+                "spark": "SELECT TO_JSON(STRUCT('blah' AS x)) AS y",
+                "trino": "SELECT JSON_FORMAT(CAST(CAST(ROW('blah') AS ROW(x VARCHAR)) AS JSON)) AS y",
+            },
+        )
         self.validate_all(
             "SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
             read={
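Spark's STR_TO_MAP gains explicit default delimiters on output; a sketch (expected string from the test above):

    import sqlglot

    # The pair delimiter ',' and key/value delimiter ':' are made explicit.
    print(sqlglot.transpile("SELECT STR_TO_MAP('a:1,b:2,c:3')", read="spark", write="spark")[0])
    # SELECT STR_TO_MAP('a:1,b:2,c:3', ',', ':')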
@@ -109,6 +109,10 @@ class TestSQLite(Validator):
             "SELECT * FROM station WHERE NOT city IS ''",
         )
         self.validate_identity("SELECT JSON_OBJECT('col1', 1, 'col2', '1')")
+        self.validate_identity(
+            'CREATE TABLE "foo t" ("foo t id" TEXT NOT NULL, PRIMARY KEY ("foo t id"))',
+            'CREATE TABLE "foo t" ("foo t id" TEXT NOT NULL PRIMARY KEY)',
+        )
 
     def test_strftime(self):
         self.validate_identity("SELECT STRFTIME('%Y/%m/%d', 'now')")
@@ -17,6 +17,8 @@ class TestTSQL(Validator):
         # tsql allows .. which means use the default schema
         self.validate_identity("SELECT * FROM a..b")
 
+        self.validate_identity("GO").assert_is(exp.Command)
+        self.validate_identity("SELECT go").selects[0].assert_is(exp.Column)
         self.validate_identity("CREATE view a.b.c", "CREATE VIEW b.c")
         self.validate_identity("DROP view a.b.c", "DROP VIEW b.c")
         self.validate_identity("ROUND(x, 1, 0)")
@@ -559,6 +561,14 @@ class TestTSQL(Validator):
             with self.assertRaises(ParseError, msg=f"When running '{query}'"):
                 self.parse_one(query)
 
+        self.validate_all(
+            "SELECT col FROM t OPTION(LABEL = 'foo')",
+            write={
+                "tsql": "SELECT col FROM t OPTION(LABEL = 'foo')",
+                "databricks": UnsupportedError,
+            },
+        )
+
     def test_types(self):
         self.validate_identity("CAST(x AS XML)")
         self.validate_identity("CAST(x AS UNIQUEIDENTIFIER)")
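UnsupportedError in a write map means generation fails when unsupported errors are raised; a sketch of how that surfaces outside the test harness (assuming the same sqlglot version):

    import sqlglot
    from sqlglot.errors import ErrorLevel, UnsupportedError

    sql = "SELECT col FROM t OPTION(LABEL = 'foo')"
    try:
        # Databricks has no OPTION(...) equivalent, so strict generation raises.
        sqlglot.transpile(sql, read="tsql", write="databricks", unsupported_level=ErrorLevel.RAISE)
    except UnsupportedError as exc:
        print("unsupported:", exc)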
18 tests/fixtures/optimizer/merge_subqueries.sql vendored
@@ -481,3 +481,21 @@ FROM (
   LEFT OUTER JOIN tbl AS ITBL ON OTBL.id = ITBL.id
 ) AS ITBL;
 WITH tbl AS (SELECT 1 AS id) SELECT OTBL.id AS id FROM tbl AS OTBL LEFT OUTER JOIN tbl AS ITBL_2 ON OTBL.id = ITBL_2.id LEFT OUTER JOIN tbl AS ITBL_3 ON OTBL.id = ITBL_3.id LEFT OUTER JOIN tbl AS ITBL ON OTBL.id = ITBL.id;
+
+# title: Inner query contains subquery with an alias that conflicts with outer query
+WITH i AS (
+  SELECT
+    a
+  FROM (
+    SELECT 1 a
+  ) AS conflict
+), j AS (
+  SELECT 1 AS a
+)
+SELECT
+  i.a,
+  conflict.a
+FROM i
+LEFT JOIN j AS conflict
+  ON i.a = conflict.a;
+WITH j AS (SELECT 1 AS a) SELECT conflict_2.a AS a, conflict.a AS a FROM (SELECT 1 AS a) AS conflict_2 LEFT JOIN j AS conflict ON conflict_2.a = conflict.a;
10 tests/fixtures/optimizer/optimizer.sql vendored
@@ -760,10 +760,12 @@ SELECT
   `_q_0`.`first_half_sales` AS `first_half_sales`,
   `_q_0`.`second_half_sales` AS `second_half_sales`
 FROM `produce` AS `produce`
-UNPIVOT((`first_half_sales`, `second_half_sales`) FOR `semesters` IN (
-  (`produce`.`q1`, `produce`.`q2`) AS 'semester_1',
-  (`produce`.`q3`, `produce`.`q4`) AS 'semester_2'
-)) AS `_q_0`;
+UNPIVOT((`first_half_sales`, `second_half_sales`) FOR
+  `semesters` IN (
+    (`produce`.`q1`, `produce`.`q2`) AS 'semester_1',
+    (`produce`.`q3`, `produce`.`q4`) AS 'semester_2'
+  )
+) AS `_q_0`;
 
 # title: quoting is preserved
 # dialect: snowflake
82 tests/fixtures/optimizer/qualify_columns.sql vendored
@@ -325,6 +325,65 @@ SELECT _q_0.a AS a FROM (SELECT x.a AS a FROM x AS x UNION SELECT x.a AS a FROM
 ((select a from x where a < 1)) UNION ((select a from x where a > 2));
 ((SELECT x.a AS a FROM x AS x WHERE x.a < 1)) UNION ((SELECT x.a AS a FROM x AS x WHERE x.a > 2));
 
+
+# dialect: bigquery
+# execute: false
+SELECT * FROM (SELECT 1 AS foo, 2 AS bar INNER UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz);
+SELECT _q_0.bar AS bar FROM (SELECT 1 AS foo, 2 AS bar INNER UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz) AS _q_0;
+
+# dialect: bigquery
+# execute: false
+SELECT * FROM (SELECT 1 AS foo, 2 AS bar UNION ALL CORRESPONDING SELECT 3 AS bar, 4 AS baz);
+SELECT _q_0.bar AS bar FROM (SELECT 1 AS foo, 2 AS bar INNER UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz) AS _q_0;
+
+# dialect: bigquery
+# execute: false
+SELECT * FROM (SELECT 1 AS foo, 2 AS bar LEFT UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz);
+SELECT _q_0.foo AS foo, _q_0.bar AS bar FROM (SELECT 1 AS foo, 2 AS bar LEFT UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz) AS _q_0;
+
+# dialect: bigquery
+# execute: false
+SELECT * FROM (SELECT 1 AS foo, 2 AS bar FULL UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz);
+SELECT _q_0.foo AS foo, _q_0.bar AS bar, _q_0.baz AS baz FROM (SELECT 1 AS foo, 2 AS bar FULL UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz) AS _q_0;
+
+
+# dialect: bigquery
+# execute: false
+SELECT * FROM (SELECT 1 AS foo, 2 AS bar LEFT UNION ALL CORRESPONDING SELECT 3 AS bar, 4 AS baz);
+SELECT _q_0.foo AS foo, _q_0.bar AS bar FROM (SELECT 1 AS foo, 2 AS bar LEFT UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz) AS _q_0;
+
+
+# dialect: bigquery
+# execute: false
+SELECT * FROM (SELECT 1 AS foo, 2 AS bar FULL UNION ALL CORRESPONDING SELECT 3 AS bar, 4 AS baz);
+SELECT _q_0.foo AS foo, _q_0.bar AS bar, _q_0.baz AS baz FROM (SELECT 1 AS foo, 2 AS bar FULL UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz) AS _q_0;
+
+# dialect: bigquery
+# execute: false
+SELECT * FROM (SELECT 1 AS foo, 2 AS bar FULL UNION ALL CORRESPONDING BY (foo, bar) SELECT 3 AS bar, 4 AS baz);
+SELECT _q_0.foo AS foo, _q_0.bar AS bar FROM (SELECT 1 AS foo, 2 AS bar FULL UNION ALL BY NAME ON (foo, bar) SELECT 3 AS bar, 4 AS baz) AS _q_0;
+
+
+# dialect: bigquery
+# execute: false
+SELECT * FROM (SELECT 1 AS foo, 2 AS bar FULL UNION ALL BY NAME ON (foo, bar) SELECT 3 AS bar, 4 AS baz);
+SELECT _q_0.foo AS foo, _q_0.bar AS bar FROM (SELECT 1 AS foo, 2 AS bar FULL UNION ALL BY NAME ON (foo, bar) SELECT 3 AS bar, 4 AS baz) AS _q_0;
+
+# dialect: bigquery
+# execute: false
+SELECT * FROM ((SELECT 1 AS foo, 2 AS bar LEFT UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz) LEFT UNION ALL BY NAME ON (bar) SELECT 3 AS foo, 4 AS bar);
+SELECT _q_0.bar AS bar FROM ((SELECT 1 AS foo, 2 AS bar LEFT UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz) LEFT UNION ALL BY NAME ON (bar) SELECT 3 AS foo, 4 AS bar) AS _q_0;
+
+# dialect: bigquery
+# execute: false
+SELECT * FROM ((SELECT 1 AS foo, 2 AS bar LEFT UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz) FULL UNION ALL BY NAME ON (foo, qux) SELECT 3 AS qux, 4 AS bar);
+SELECT _q_0.foo AS foo, _q_0.qux AS qux FROM ((SELECT 1 AS foo, 2 AS bar LEFT UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz) FULL UNION ALL BY NAME ON (foo, qux) SELECT 3 AS qux, 4 AS bar) AS _q_0;
+
+# dialect: bigquery
+# execute: false
+SELECT * FROM (((SELECT 1 AS foo, 2 AS bar LEFT UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz) FULL UNION ALL BY NAME ON (foo, qux) SELECT 3 AS qux, 4 AS bar) INNER UNION ALL BY NAME ON (foo) SELECT 6 AS foo);
+SELECT _q_0.foo AS foo FROM (((SELECT 1 AS foo, 2 AS bar LEFT UNION ALL BY NAME SELECT 3 AS bar, 4 AS baz) FULL UNION ALL BY NAME ON (foo, qux) SELECT 3 AS qux, 4 AS bar) INNER UNION ALL BY NAME ON (foo) SELECT 6 AS foo) AS _q_0;
+
 --------------------------------------
 -- Subqueries
 --------------------------------------
@@ -435,6 +494,29 @@ WITH tbl1 AS (SELECT STRUCT(1 AS col1, 2 AS col1) AS col) SELECT tbl1.col.* FROM
 SELECT * FROM READ_CSV('file.csv');
 SELECT * FROM READ_CSV('file.csv') AS _q_0;
 
+# dialect: clickhouse
+# Title: Expand tuples in VALUES using the structure provided
+# execute: false
+SELECT * FROM VALUES ('person String, place String', ('Noah', 'Paris'));
+SELECT _q_0.person AS person, _q_0.place AS place FROM VALUES ('person String, place String', ('Noah', 'Paris')) AS _q_0(person, place);
+
+# dialect: clickhouse
+# Title: Expand tuples in VALUES using the default naming scheme in CH
+# execute: false
+SELECT * FROM VALUES ((1, 1), (2, 2));
+SELECT _q_0.c1 AS c1, _q_0.c2 AS c2 FROM VALUES ((1, 1), (2, 2)) AS _q_0(c1, c2);
+
+# dialect: clickhouse
+# Title: Expand fields in VALUES using the default naming scheme in CH
+# execute: false
+SELECT * FROM VALUES (1, 2, 3);
+SELECT _q_0.c1 AS c1 FROM VALUES ((1), (2), (3)) AS _q_0(c1);
+
+# title: Expand PIVOT column combinations
+# dialect: duckdb
+WITH cities AS (SELECT * FROM (VALUES ('nl', 'amsterdam', 2000, 1005)) AS t(country, name, year, population)) SELECT * FROM cities PIVOT(SUM(population) AS total, COUNT(population) AS count FOR country IN ('nl', 'us') year IN (2000, 2010) name IN ('amsterdam', 'seattle'));
+WITH cities AS (SELECT t.country AS country, t.name AS name, t.year AS year, t.population AS population FROM (VALUES ('nl', 'amsterdam', 2000, 1005)) AS t(country, name, year, population)) SELECT _q_0.nl_2000_amsterdam_total AS nl_2000_amsterdam_total, _q_0.nl_2000_amsterdam_count AS nl_2000_amsterdam_count, _q_0.nl_2000_seattle_total AS nl_2000_seattle_total, _q_0.nl_2000_seattle_count AS nl_2000_seattle_count, _q_0.nl_2010_amsterdam_total AS nl_2010_amsterdam_total, _q_0.nl_2010_amsterdam_count AS nl_2010_amsterdam_count, _q_0.nl_2010_seattle_total AS nl_2010_seattle_total, _q_0.nl_2010_seattle_count AS nl_2010_seattle_count, _q_0.us_2000_amsterdam_total AS us_2000_amsterdam_total, _q_0.us_2000_amsterdam_count AS us_2000_amsterdam_count, _q_0.us_2000_seattle_total AS us_2000_seattle_total, _q_0.us_2000_seattle_count AS us_2000_seattle_count, _q_0.us_2010_amsterdam_total AS us_2010_amsterdam_total, _q_0.us_2010_amsterdam_count AS us_2010_amsterdam_count, _q_0.us_2010_seattle_total AS us_2010_seattle_total, _q_0.us_2010_seattle_count AS us_2010_seattle_count FROM cities AS cities PIVOT(SUM(population) AS total, COUNT(population) AS count FOR country IN ('nl', 'us') year IN (2000, 2010) name IN ('amsterdam', 'seattle')) AS _q_0;
+
 --------------------------------------
 -- CTEs
 --------------------------------------
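These fixtures run through the qualifier; the ClickHouse VALUES expansion can be reproduced directly (sketch; qualify is the optimizer entry point that applies qualify_columns, and the expected line comes from the fixture above):

    import sqlglot
    from sqlglot.optimizer.qualify import qualify

    # Star expansion uses ClickHouse's default c1, c2, ... naming for bare VALUES.
    expr = sqlglot.parse_one("SELECT * FROM VALUES ((1, 1), (2, 2))", read="clickhouse")
    print(qualify(expr, dialect="clickhouse").sql(dialect="clickhouse"))
    # SELECT _q_0.c1 AS c1, _q_0.c2 AS c2 FROM VALUES ((1, 1), (2, 2)) AS _q_0(c1, c2)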
@@ -55,13 +55,10 @@ def simplify(expression, **kwargs):
 
 
 def annotate_functions(expression, **kwargs):
-    from sqlglot.dialects import Dialect
-
     dialect = kwargs.get("dialect")
     schema = kwargs.get("schema")
 
-    annotators = Dialect.get_or_raise(dialect).ANNOTATORS
-    annotated = annotate_types(expression, annotators=annotators, schema=schema)
+    annotated = annotate_types(expression, dialect=dialect, schema=schema)
 
     return annotated.expressions[0]
 
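The simplification works because annotate_types itself now accepts a dialect, as the new call shows; a sketch of the call shape (the printed type is illustrative, not asserted by this diff):

    from sqlglot import parse_one
    from sqlglot.optimizer.annotate_types import annotate_types

    # dialect= lets annotate_types resolve that dialect's ANNOTATORS internally
    # instead of the caller threading them through.
    expr = annotate_types(parse_one("SELECT LENGTH('x') AS l"), dialect="duckdb")
    print(expr.selects[0].type)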
@@ -646,6 +646,27 @@ class TestParser(unittest.TestCase):
         ) PIVOT (AVG("PrIcE"), MAX(quality) FOR partname IN ('prop' AS prop1, 'rudder'))
         """
 
+        two_in_clauses_duckdb = """
+        SELECT * FROM cities PIVOT (
+            sum(population) AS total,
+            count(population) AS count
+            FOR
+                year IN (2000, 2010)
+                country IN ('NL', 'US')
+        )
+        """
+
+        three_in_clauses_duckdb = """
+        SELECT * FROM cities PIVOT (
+            sum(population) AS total,
+            count(population) AS count
+            FOR
+                year IN (2000, 2010)
+                country IN ('NL', 'US')
+                name IN ('Amsterdam', 'Seattle')
+        )
+        """
+
         query_to_column_names = {
             nothing_aliased: {
                 "bigquery": ["prop", "rudder"],
@@ -707,13 +728,48 @@ class TestParser(unittest.TestCase):
                     '"rudder_max(quality)"',
                 ],
             },
+            two_in_clauses_duckdb: {
+                "duckdb": [
+                    '"2000_NL_total"',
+                    '"2000_NL_count"',
+                    '"2000_US_total"',
+                    '"2000_US_count"',
+                    '"2010_NL_total"',
+                    '"2010_NL_count"',
+                    '"2010_US_total"',
+                    '"2010_US_count"',
+                ],
+            },
+            three_in_clauses_duckdb: {
+                "duckdb": [
+                    '"2000_NL_Amsterdam_total"',
+                    '"2000_NL_Amsterdam_count"',
+                    '"2000_NL_Seattle_total"',
+                    '"2000_NL_Seattle_count"',
+                    '"2000_US_Amsterdam_total"',
+                    '"2000_US_Amsterdam_count"',
+                    '"2000_US_Seattle_total"',
+                    '"2000_US_Seattle_count"',
+                    '"2010_NL_Amsterdam_total"',
+                    '"2010_NL_Amsterdam_count"',
+                    '"2010_NL_Seattle_total"',
+                    '"2010_NL_Seattle_count"',
+                    '"2010_US_Amsterdam_total"',
+                    '"2010_US_Amsterdam_count"',
+                    '"2010_US_Seattle_total"',
+                    '"2010_US_Seattle_count"',
+                ],
+            },
         }
 
         for query, dialect_columns in query_to_column_names.items():
             for dialect, expected_columns in dialect_columns.items():
-                expr = parse_one(query, read=dialect)
-                columns = expr.args["from"].this.args["pivots"][0].args["columns"]
-                self.assertEqual(expected_columns, [col.sql(dialect=dialect) for col in columns])
+                with self.subTest(f"Testing query '{query}' for dialect {dialect}"):
+                    expr = parse_one(query, read=dialect)
+                    columns = expr.args["from"].this.args["pivots"][0].args["columns"]
+                    self.assertEqual(
+                        expected_columns, [col.sql(dialect=dialect) for col in columns]
+                    )
 
     def test_parse_nested(self):
         def warn_over_threshold(query: str, max_threshold: float = 0.2):
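The DuckDB pivot column names join each IN-clause value with the aggregate alias; a sketch with a smaller, hypothetical query (expected names follow the pattern asserted above):

    from sqlglot import parse_one

    sql = "SELECT * FROM cities PIVOT (SUM(population) AS total FOR year IN (2000, 2010) country IN ('NL', 'US'))"
    pivot = parse_one(sql, read="duckdb").args["from"].this.args["pivots"][0]
    print([col.sql(dialect="duckdb") for col in pivot.args["columns"]])
    # ['"2000_NL_total"', '"2000_US_total"', '"2010_NL_total"', '"2010_US_total"']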