
Adding upstream version 26.14.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-04-16 09:04:38 +02:00
parent 539faf8d40
commit dfe1cec38a
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
84 changed files with 63872 additions and 61909 deletions


@@ -154,6 +154,12 @@ class TestAthena(Validator):
write_sql='CREATE TABLE "foo" AS WITH "foo" AS (SELECT "a", "b" FROM "bar") SELECT * FROM "foo"',
)
# CTAS with Union should still hit the Trino engine and not Hive
self.validate_identity(
'CREATE TABLE `foo` AS WITH `foo` AS (SELECT "a", `b` FROM "bar") SELECT * FROM "foo" UNION SELECT * FROM "foo"',
write_sql='CREATE TABLE "foo" AS WITH "foo" AS (SELECT "a", "b" FROM "bar") SELECT * FROM "foo" UNION SELECT * FROM "foo"',
)
self.validate_identity("DESCRIBE foo.bar", write_sql="DESCRIBE `foo`.`bar`", identify=True)
def test_dml_quoting(self):
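
The CTAS-with-UNION case above can be reproduced outside the test harness; a minimal sketch, assuming sqlglot is installed and reusing the same statement as the test:

import sqlglot

sql = 'CREATE TABLE `foo` AS WITH `foo` AS (SELECT "a", `b` FROM "bar") SELECT * FROM "foo" UNION SELECT * FROM "foo"'
# Per the expectation above, a CTAS whose body contains a UNION is generated for
# Athena's Trino engine, so identifiers come back double-quoted rather than backtick-quoted.
print(sqlglot.transpile(sql, read="athena", write="athena")[0])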


@@ -1404,11 +1404,13 @@ LANGUAGE js AS
)
self.validate_all(
"TO_JSON_STRING(x)",
read={"bigquery": "TO_JSON_STRING(x)"},
read={
"bigquery": "TO_JSON_STRING(x)",
},
write={
"bigquery": "TO_JSON_STRING(x)",
"duckdb": "CAST(TO_JSON(x) AS TEXT)",
"presto": "JSON_FORMAT(x)",
"presto": "JSON_FORMAT(CAST(x AS JSON))",
"spark": "TO_JSON(x)",
},
)
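
A minimal sketch of the TO_JSON_STRING mapping shown above, assuming sqlglot is importable; the expected strings come from the write dict in the test:

import sqlglot

# BigQuery TO_JSON_STRING(x) -> Presto JSON_FORMAT(CAST(x AS JSON)), per the test above.
print(sqlglot.transpile("TO_JSON_STRING(x)", read="bigquery", write="presto")[0])
# ... and -> DuckDB CAST(TO_JSON(x) AS TEXT).
print(sqlglot.transpile("TO_JSON_STRING(x)", read="bigquery", write="duckdb")[0])
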
@@ -1486,7 +1488,7 @@ WHERE
"snowflake": "SELECT LENGTH(foo)",
},
write={
"duckdb": "SELECT CASE TYPEOF(foo) WHEN 'VARCHAR' THEN LENGTH(CAST(foo AS TEXT)) WHEN 'BLOB' THEN OCTET_LENGTH(CAST(foo AS BLOB)) END",
"duckdb": "SELECT CASE TYPEOF(foo) WHEN 'BLOB' THEN OCTET_LENGTH(CAST(foo AS BLOB)) ELSE LENGTH(CAST(foo AS TEXT)) END",
"snowflake": "SELECT LENGTH(foo)",
"": "SELECT LENGTH(foo)",
},
@@ -2382,3 +2384,43 @@ OPTIONS (
for select in annotated.selects:
self.assertEqual(select.type.sql("bigquery"), "TIMESTAMP")
def test_set_operations(self):
self.validate_identity("SELECT 1 AS foo INNER UNION ALL SELECT 3 AS foo, 4 AS bar")
for side in ("", " LEFT", " FULL"):
for kind in ("", " OUTER"):
for name in (
"",
" BY NAME",
" BY NAME ON (foo, bar)",
):
with self.subTest(f"Testing {side} {kind} {name} in test_set_operations"):
self.validate_identity(
f"SELECT 1 AS foo{side}{kind} UNION ALL{name} SELECT 3 AS foo, 4 AS bar",
)
self.validate_identity(
"SELECT 1 AS x UNION ALL CORRESPONDING SELECT 2 AS x",
"SELECT 1 AS x INNER UNION ALL BY NAME SELECT 2 AS x",
)
self.validate_identity(
"SELECT 1 AS x UNION ALL CORRESPONDING BY (foo, bar) SELECT 2 AS x",
"SELECT 1 AS x INNER UNION ALL BY NAME ON (foo, bar) SELECT 2 AS x",
)
self.validate_identity(
"SELECT 1 AS x LEFT UNION ALL CORRESPONDING SELECT 2 AS x",
"SELECT 1 AS x LEFT UNION ALL BY NAME SELECT 2 AS x",
)
self.validate_identity(
"SELECT 1 AS x UNION ALL STRICT CORRESPONDING SELECT 2 AS x",
"SELECT 1 AS x UNION ALL BY NAME SELECT 2 AS x",
)
self.validate_identity(
"SELECT 1 AS x UNION ALL STRICT CORRESPONDING BY (foo, bar) SELECT 2 AS x",
"SELECT 1 AS x UNION ALL BY NAME ON (foo, bar) SELECT 2 AS x",
)
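
The CORRESPONDING / BY NAME rewrites above boil down to a parse-then-generate round trip; a minimal sketch, assuming sqlglot is importable:

from sqlglot import parse_one

sql = "SELECT 1 AS x UNION ALL CORRESPONDING SELECT 2 AS x"
# Per the test above, CORRESPONDING is normalized to INNER ... BY NAME on output.
assert parse_one(sql, read="bigquery").sql(dialect="bigquery") == (
    "SELECT 1 AS x INNER UNION ALL BY NAME SELECT 2 AS x"
)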


@@ -33,6 +33,7 @@ class TestClickhouse(Validator):
self.assertEqual(expr.sql(dialect="clickhouse"), "COUNT(x)")
self.assertIsNone(expr._meta)
self.validate_identity('SELECT DISTINCT ON ("id") * FROM t')
self.validate_identity("SELECT 1 OR (1 = 2)")
self.validate_identity("SELECT 1 AND (1 = 2)")
self.validate_identity("SELECT json.a.:Int64")
@@ -105,8 +106,10 @@ class TestClickhouse(Validator):
self.validate_identity("SELECT * FROM table LIMIT 1 BY a, b")
self.validate_identity("SELECT * FROM table LIMIT 2 OFFSET 1 BY a, b")
self.validate_identity("TRUNCATE TABLE t1 ON CLUSTER test_cluster")
self.validate_identity("TRUNCATE TABLE t1 ON CLUSTER '{cluster}'")
self.validate_identity("TRUNCATE DATABASE db")
self.validate_identity("TRUNCATE DATABASE db ON CLUSTER test_cluster")
self.validate_identity("TRUNCATE DATABASE db ON CLUSTER '{cluster}'")
self.validate_identity(
"SELECT DATE_BIN(toDateTime('2023-01-01 14:45:00'), INTERVAL '1' MINUTE, toDateTime('2023-01-01 14:35:30'), 'UTC')",
)
@@ -155,12 +158,21 @@ class TestClickhouse(Validator):
self.validate_identity(
"CREATE TABLE test ON CLUSTER default (id UInt8) ENGINE=AggregatingMergeTree() ORDER BY tuple()"
)
self.validate_identity(
"CREATE TABLE test ON CLUSTER '{cluster}' (id UInt8) ENGINE=AggregatingMergeTree() ORDER BY tuple()"
)
self.validate_identity(
"CREATE MATERIALIZED VIEW test_view ON CLUSTER cl1 (id UInt8) ENGINE=AggregatingMergeTree() ORDER BY tuple() AS SELECT * FROM test_data"
)
self.validate_identity(
"CREATE MATERIALIZED VIEW test_view ON CLUSTER '{cluster}' (id UInt8) ENGINE=AggregatingMergeTree() ORDER BY tuple() AS SELECT * FROM test_data"
)
self.validate_identity(
"CREATE MATERIALIZED VIEW test_view ON CLUSTER cl1 TO table1 AS SELECT * FROM test_data"
)
self.validate_identity(
"CREATE MATERIALIZED VIEW test_view ON CLUSTER '{cluster}' TO table1 AS SELECT * FROM test_data"
)
self.validate_identity(
"CREATE MATERIALIZED VIEW test_view TO db.table1 (id UInt8) AS SELECT * FROM test_data"
)
@@ -184,7 +196,7 @@ class TestClickhouse(Validator):
)
self.validate_identity(
"INSERT INTO tab VALUES ({'key1': 1, 'key2': 10}), ({'key1': 2, 'key2': 20}), ({'key1': 3, 'key2': 30})",
"INSERT INTO tab VALUES (map('key1', 1, 'key2', 10)), (map('key1', 2, 'key2', 20)), (map('key1', 3, 'key2', 30))",
"INSERT INTO tab VALUES ((map('key1', 1, 'key2', 10))), ((map('key1', 2, 'key2', 20))), ((map('key1', 3, 'key2', 30)))",
)
self.validate_identity(
"SELECT (toUInt8('1') + toUInt8('2')) IS NOT NULL",
@@ -507,11 +519,12 @@ class TestClickhouse(Validator):
"INSERT INTO FUNCTION s3('url', 'CSV', 'name String, value UInt32', 'gzip') SELECT name, value FROM existing_table"
)
self.validate_identity(
"INSERT INTO FUNCTION remote('localhost', default.simple_table) VALUES (100, 'inserted via remote()')"
"INSERT INTO FUNCTION remote('localhost', default.simple_table) VALUES (100, 'inserted via remote()')",
"INSERT INTO FUNCTION remote('localhost', default.simple_table) VALUES ((100), ('inserted via remote()'))",
)
self.validate_identity(
"""INSERT INTO TABLE FUNCTION hdfs('hdfs://hdfs1:9000/test', 'TSV', 'name String, column2 UInt32, column3 UInt32') VALUES ('test', 1, 2)""",
"""INSERT INTO FUNCTION hdfs('hdfs://hdfs1:9000/test', 'TSV', 'name String, column2 UInt32, column3 UInt32') VALUES ('test', 1, 2)""",
"""INSERT INTO FUNCTION hdfs('hdfs://hdfs1:9000/test', 'TSV', 'name String, column2 UInt32, column3 UInt32') VALUES (('test'), (1), (2))""",
)
self.validate_identity("SELECT 1 FORMAT TabSeparated")
@@ -546,22 +559,23 @@ class TestClickhouse(Validator):
)
self.validate_identity("ALTER TABLE visits REPLACE PARTITION ID '201901' FROM visits_tmp")
self.validate_identity("ALTER TABLE visits ON CLUSTER test_cluster DROP COLUMN col1")
self.validate_identity("ALTER TABLE visits ON CLUSTER '{cluster}' DROP COLUMN col1")
self.validate_identity("DELETE FROM tbl ON CLUSTER test_cluster WHERE date = '2019-01-01'")
self.validate_identity("DELETE FROM tbl ON CLUSTER '{cluster}' WHERE date = '2019-01-01'")
self.assertIsInstance(
parse_one("Tuple(select Int64)", into=exp.DataType, read="clickhouse"), exp.DataType
)
self.validate_identity("INSERT INTO t (col1, col2) VALUES ('abcd', 1234)")
self.validate_identity(
"INSERT INTO t (col1, col2) VALUES ('abcd', 1234)",
"INSERT INTO t (col1, col2) VALUES (('abcd'), (1234))",
)
self.validate_all(
"INSERT INTO t (col1, col2) VALUES ('abcd', 1234)",
read={
# looks like values table function, but should be parsed as VALUES block
"clickhouse": "INSERT INTO t (col1, col2) values('abcd', 1234)"
},
write={
"clickhouse": "INSERT INTO t (col1, col2) VALUES ('abcd', 1234)",
"postgres": "INSERT INTO t (col1, col2) VALUES ('abcd', 1234)",
"clickhouse": "INSERT INTO t (col1, col2) VALUES (('abcd'), (1234))",
"postgres": "INSERT INTO t (col1, col2) VALUES (('abcd'), (1234))",
},
)
self.validate_identity("SELECT TRIM(TRAILING ')' FROM '( Hello, world! )')")
@@ -591,6 +605,9 @@ class TestClickhouse(Validator):
self.validate_identity("SELECT arrayConcat([1, 2], [3, 4])")
def test_clickhouse_values(self):
ast = self.parse_one("SELECT * FROM VALUES (1, 2, 3)")
self.assertEqual(len(list(ast.find_all(exp.Tuple))), 4)
values = exp.select("*").from_(
exp.values([exp.tuple_(1, 2, 3)], alias="subq", columns=["a", "b", "c"])
)
@@ -599,10 +616,18 @@ class TestClickhouse(Validator):
"SELECT * FROM (SELECT 1 AS a, 2 AS b, 3 AS c) AS subq",
)
self.validate_identity("INSERT INTO t (col1, col2) VALUES ('abcd', 1234)")
self.validate_identity("SELECT * FROM VALUES ((1, 1), (2, 1), (3, 1), (4, 1))")
self.validate_identity(
"SELECT type, id FROM VALUES ('id Int, type Int', (1, 1), (2, 1), (3, 1), (4, 1))"
)
self.validate_identity(
"INSERT INTO t (col1, col2) VALUES ('abcd', 1234)",
"INSERT INTO t (col1, col2) VALUES (('abcd'), (1234))",
)
self.validate_identity(
"INSERT INTO t (col1, col2) FORMAT Values('abcd', 1234)",
"INSERT INTO t (col1, col2) VALUES ('abcd', 1234)",
"INSERT INTO t (col1, col2) VALUES (('abcd'), (1234))",
)
self.validate_all(
@@ -1171,6 +1196,7 @@ LIFETIME(MIN 0 MAX 0)""",
for creatable in ("DATABASE", "TABLE", "VIEW", "DICTIONARY", "FUNCTION"):
with self.subTest(f"Test DROP {creatable} ON CLUSTER"):
self.validate_identity(f"DROP {creatable} test ON CLUSTER test_cluster")
self.validate_identity(f"DROP {creatable} test ON CLUSTER '{{cluster}}'")
def test_datetime_funcs(self):
# Each datetime func has an alias that is roundtripped to the original name e.g. (DATE_SUB, DATESUB) -> DATE_SUB
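
A minimal illustration of the alias round trip described in the comment above, assuming sqlglot is importable; the exact output formatting is not asserted here:

from sqlglot import parse_one

# DATESUB is an alias of DATE_SUB and should be printed back under the original name,
# per the comment above.
print(parse_one("SELECT DATESUB(DAY, 1, x)", read="clickhouse").sql(dialect="clickhouse"))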


@@ -68,6 +68,24 @@ class TestDatabricks(Validator):
"FROM_UTC_TIMESTAMP(CAST(x AS TIMESTAMP), tz)",
)
self.validate_all(
"SELECT c1:item[1].price",
read={
"spark": "SELECT GET_JSON_OBJECT(c1, '$.item[1].price')",
},
write={
"databricks": "SELECT c1:item[1].price",
"spark": "SELECT GET_JSON_OBJECT(c1, '$.item[1].price')",
},
)
self.validate_all(
"SELECT GET_JSON_OBJECT(c1, '$.item[1].price')",
write={
"databricks": "SELECT c1:item[1].price",
"spark": "SELECT GET_JSON_OBJECT(c1, '$.item[1].price')",
},
)
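
A minimal sketch of the Spark/Databricks JSON path mapping exercised above, assuming sqlglot is importable; the expected strings are taken from the read/write dicts:

import sqlglot

# Spark GET_JSON_OBJECT -> Databricks ':' path syntax, per the test above.
print(sqlglot.transpile("SELECT GET_JSON_OBJECT(c1, '$.item[1].price')", read="spark", write="databricks")[0])
# ... and back again.
print(sqlglot.transpile("SELECT c1:item[1].price", read="databricks", write="spark")[0])
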
self.validate_all(
"CREATE TABLE foo (x INT GENERATED ALWAYS AS (YEAR(y)))",
write={


@@ -400,6 +400,9 @@ class TestDuckDB(Validator):
self.validate_identity(
"SELECT * FROM (PIVOT Cities ON Year USING SUM(Population) GROUP BY Country) AS pivot_alias"
)
self.validate_identity(
"SELECT * FROM cities PIVOT(SUM(population) FOR year IN (2000, 2010, 2020) GROUP BY country)"
)
self.validate_identity(
# QUALIFY comes after WINDOW
"SELECT schema_name, function_name, ROW_NUMBER() OVER my_window AS function_rank FROM DUCKDB_FUNCTIONS() WINDOW my_window AS (PARTITION BY schema_name ORDER BY function_name) QUALIFY ROW_NUMBER() OVER my_window < 3"
@@ -1595,3 +1598,7 @@ class TestDuckDB(Validator):
"SELECT (@-1) + 1",
"SELECT (ABS(-1)) + 1",
)
def test_show_tables(self):
self.validate_identity("SHOW TABLES").assert_is(exp.Show)
self.validate_identity("SHOW ALL TABLES").assert_is(exp.Show)


@@ -76,6 +76,9 @@ class TestPostgres(Validator):
self.validate_identity("SELECT CURRENT_SCHEMA")
self.validate_identity("SELECT CURRENT_USER")
self.validate_identity("SELECT * FROM ONLY t1")
self.validate_identity(
"SELECT * FROM test_data, LATERAL JSONB_ARRAY_ELEMENTS(data) WITH ORDINALITY AS elem(value, ordinality)"
)
self.validate_identity(
"SELECT id, name FROM xml_data AS t, XMLTABLE('/root/user' PASSING t.xml COLUMNS id INT PATH '@id', name TEXT PATH 'name/text()') AS x"
)
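
The WITH ORDINALITY identity above is just a parse/generate round trip; a minimal sketch, assuming sqlglot is importable:

from sqlglot import parse_one

sql = "SELECT * FROM test_data, LATERAL JSONB_ARRAY_ELEMENTS(data) WITH ORDINALITY AS elem(value, ordinality)"
# validate_identity means the statement should come back verbatim.
assert parse_one(sql, read="postgres").sql(dialect="postgres") == sql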


@@ -1021,9 +1021,6 @@ class TestPresto(Validator):
)
self.validate_all(
"JSON_FORMAT(x)",
read={
"spark": "TO_JSON(x)",
},
write={
"bigquery": "TO_JSON_STRING(x)",
"duckdb": "CAST(TO_JSON(x) AS TEXT)",


@@ -1,6 +1,6 @@
from unittest import mock
from sqlglot import UnsupportedError, exp, parse_one
from sqlglot import UnsupportedError, exp, parse_one, ParseError
from sqlglot.optimizer.normalize_identifiers import normalize_identifiers
from sqlglot.optimizer.qualify_columns import quote_identifiers
from tests.dialects.test_dialect import Validator
@@ -1481,9 +1481,30 @@ class TestSnowflake(Validator):
self.validate_identity("CREATE TAG cost_center ALLOWED_VALUES 'a', 'b'")
self.validate_identity("CREATE WAREHOUSE x").this.assert_is(exp.Identifier)
self.validate_identity("CREATE STREAMLIT x").this.assert_is(exp.Identifier)
self.validate_identity(
"CREATE TEMPORARY STAGE stage1 FILE_FORMAT=(TYPE=PARQUET)"
).this.assert_is(exp.Table)
self.validate_identity(
"CREATE STAGE stage1 FILE_FORMAT='format1'",
"CREATE STAGE stage1 FILE_FORMAT=(FORMAT_NAME='format1')",
)
self.validate_identity("CREATE STAGE stage1 FILE_FORMAT=(FORMAT_NAME=stage1.format1)")
self.validate_identity("CREATE STAGE stage1 FILE_FORMAT=(FORMAT_NAME='stage1.format1')")
self.validate_identity(
"CREATE STAGE stage1 FILE_FORMAT=schema1.format1",
"CREATE STAGE stage1 FILE_FORMAT=(FORMAT_NAME=schema1.format1)",
)
with self.assertRaises(ParseError):
self.parse_one("CREATE STAGE stage1 FILE_FORMAT=123", dialect="snowflake")
self.validate_identity(
"CREATE STAGE s1 URL='s3://bucket-123' FILE_FORMAT=(TYPE='JSON') CREDENTIALS=(aws_key_id='test' aws_secret_key='test')"
)
self.validate_identity(
"CREATE OR REPLACE TAG IF NOT EXISTS cost_center COMMENT='cost_center tag'"
).this.assert_is(exp.Identifier)
self.validate_identity(
"CREATE TEMPORARY FILE FORMAT fileformat1 TYPE=PARQUET COMPRESSION=auto"
).this.assert_is(exp.Table)
self.validate_identity(
"CREATE DYNAMIC TABLE product (pre_tax_profit, taxes, after_tax_profit) TARGET_LAG='20 minutes' WAREHOUSE=mywh AS SELECT revenue - cost, (revenue - cost) * tax_rate, (revenue - cost) * (1.0 - tax_rate) FROM staging_table"
)
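
A minimal sketch of the CREATE STAGE FILE_FORMAT normalization tested above, assuming sqlglot is importable; the expected string and the ParseError case are taken from the test:

import sqlglot
from sqlglot import ParseError

# A bare string is normalized into FILE_FORMAT=(FORMAT_NAME=...), per the test above.
print(sqlglot.transpile("CREATE STAGE stage1 FILE_FORMAT='format1'", read="snowflake", write="snowflake")[0])
# A numeric value is rejected at parse time.
try:
    sqlglot.transpile("CREATE STAGE stage1 FILE_FORMAT=123", read="snowflake", write="snowflake")
except ParseError as e:
    print("rejected:", e)
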
@@ -1499,9 +1520,6 @@ class TestSnowflake(Validator):
self.validate_identity(
"CREATE TABLE orders_clone_restore CLONE orders BEFORE (STATEMENT => '8e5d0ca9-005e-44e6-b858-a8f5b37c5726')"
)
self.validate_identity(
"CREATE TABLE a (x DATE, y BIGINT) PARTITION BY (x) integration='q' auto_refresh=TRUE file_format=(type = parquet)"
)
self.validate_identity(
"CREATE SCHEMA mytestschema_clone_restore CLONE testschema BEFORE (TIMESTAMP => TO_TIMESTAMP(40 * 365 * 86400))"
)
@@ -1544,8 +1562,8 @@ class TestSnowflake(Validator):
partition by (col1,col2,col3)
location=@s2/logs/
partition_type = user_specified
file_format = (type = parquet)""",
"CREATE EXTERNAL TABLE et2 (col1 DATE AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL1') AS DATE)), col2 VARCHAR AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL2') AS VARCHAR)), col3 DECIMAL(38, 0) AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL3') AS DECIMAL(38, 0)))) LOCATION @s2/logs/ PARTITION BY (col1, col2, col3) partition_type=user_specified file_format=(type = parquet)",
file_format = (type = parquet compression = gzip binary_as_text = false)""",
"CREATE EXTERNAL TABLE et2 (col1 DATE AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL1') AS DATE)), col2 VARCHAR AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL2') AS VARCHAR)), col3 DECIMAL(38, 0) AS (CAST(GET_PATH(PARSE_JSON(metadata$external_table_partition), 'COL3') AS DECIMAL(38, 0)))) PARTITION BY (col1, col2, col3) LOCATION=@s2/logs/ partition_type=user_specified FILE_FORMAT=(type=parquet compression=gzip binary_as_text=FALSE)",
)
self.validate_all(
@@ -2092,6 +2110,15 @@ MATCH_RECOGNIZE (
self.assertEqual(ast.this, "DATABASES")
self.assertEqual(ast.args.get("scope_kind"), "ACCOUNT")
def test_show_file_formats(self):
self.validate_identity("SHOW FILE FORMATS")
self.validate_identity("SHOW FILE FORMATS LIKE 'foo' IN DATABASE db1")
self.validate_identity("SHOW FILE FORMATS LIKE 'foo' IN SCHEMA db1.schema1")
ast = parse_one("SHOW FILE FORMATS IN ACCOUNT", read="snowflake")
self.assertEqual(ast.this, "FILE FORMATS")
self.assertEqual(ast.args.get("scope_kind"), "ACCOUNT")
def test_show_functions(self):
self.validate_identity("SHOW FUNCTIONS")
self.validate_identity("SHOW FUNCTIONS LIKE 'foo' IN CLASS bla")
@@ -2109,6 +2136,15 @@ MATCH_RECOGNIZE (
self.assertEqual(ast.this, "PROCEDURES")
self.assertEqual(ast.args.get("scope_kind"), "ACCOUNT")
def test_show_stages(self):
self.validate_identity("SHOW STAGES")
self.validate_identity("SHOW STAGES LIKE 'foo' IN DATABASE db1")
self.validate_identity("SHOW STAGES LIKE 'foo' IN SCHEMA db1.schema1")
ast = parse_one("SHOW STAGES IN ACCOUNT", read="snowflake")
self.assertEqual(ast.this, "STAGES")
self.assertEqual(ast.args.get("scope_kind"), "ACCOUNT")
def test_show_warehouses(self):
self.validate_identity("SHOW WAREHOUSES")
self.validate_identity("SHOW WAREHOUSES LIKE 'foo' WITH PRIVILEGES USAGE, MODIFY")


@@ -245,6 +245,7 @@ TBLPROPERTIES (
"REFRESH TABLE t",
)
self.validate_identity("IF(cond, foo AS bar, bla AS baz)")
self.validate_identity("any_value(col, true)", "ANY_VALUE(col) IGNORE NULLS")
self.validate_identity("first(col, true)", "FIRST(col) IGNORE NULLS")
self.validate_identity("first_value(col, true)", "FIRST_VALUE(col) IGNORE NULLS")
@@ -307,6 +308,15 @@ TBLPROPERTIES (
"SELECT STR_TO_MAP('a:1,b:2,c:3')",
"SELECT STR_TO_MAP('a:1,b:2,c:3', ',', ':')",
)
self.validate_all(
"SELECT TO_JSON(STRUCT('blah' AS x)) AS y",
write={
"presto": "SELECT JSON_FORMAT(CAST(CAST(ROW('blah') AS ROW(x VARCHAR)) AS JSON)) AS y",
"spark": "SELECT TO_JSON(STRUCT('blah' AS x)) AS y",
"trino": "SELECT JSON_FORMAT(CAST(CAST(ROW('blah') AS ROW(x VARCHAR)) AS JSON)) AS y",
},
)
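
A minimal sketch of the TO_JSON(STRUCT(...)) mapping above, assuming sqlglot is importable; the expected Trino string is taken from the write dict:

import sqlglot

sql = "SELECT TO_JSON(STRUCT('blah' AS x)) AS y"
# Per the test above, the struct is cast to a ROW type and then to JSON before JSON_FORMAT:
# SELECT JSON_FORMAT(CAST(CAST(ROW('blah') AS ROW(x VARCHAR)) AS JSON)) AS y
print(sqlglot.transpile(sql, read="spark", write="trino")[0])
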
self.validate_all(
"SELECT TRY_ELEMENT_AT(ARRAY(1, 2, 3), 2)",
read={


@@ -109,6 +109,10 @@ class TestSQLite(Validator):
"SELECT * FROM station WHERE NOT city IS ''",
)
self.validate_identity("SELECT JSON_OBJECT('col1', 1, 'col2', '1')")
self.validate_identity(
'CREATE TABLE "foo t" ("foo t id" TEXT NOT NULL, PRIMARY KEY ("foo t id"))',
'CREATE TABLE "foo t" ("foo t id" TEXT NOT NULL PRIMARY KEY)',
)
def test_strftime(self):
self.validate_identity("SELECT STRFTIME('%Y/%m/%d', 'now')")


@@ -17,6 +17,8 @@ class TestTSQL(Validator):
# tsql allows .. which means use the default schema
self.validate_identity("SELECT * FROM a..b")
self.validate_identity("GO").assert_is(exp.Command)
self.validate_identity("SELECT go").selects[0].assert_is(exp.Column)
self.validate_identity("CREATE view a.b.c", "CREATE VIEW b.c")
self.validate_identity("DROP view a.b.c", "DROP VIEW b.c")
self.validate_identity("ROUND(x, 1, 0)")
@@ -559,6 +561,14 @@ class TestTSQL(Validator):
with self.assertRaises(ParseError, msg=f"When running '{query}'"):
self.parse_one(query)
self.validate_all(
"SELECT col FROM t OPTION(LABEL = 'foo')",
write={
"tsql": "SELECT col FROM t OPTION(LABEL = 'foo')",
"databricks": UnsupportedError,
},
)
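
A minimal sketch of the OPTION(LABEL ...) behaviour above, assuming sqlglot is importable; raising on unsupported syntax via unsupported_level mirrors what the Validator harness does and is an assumption outside it:

import sqlglot
from sqlglot.errors import ErrorLevel, UnsupportedError

sql = "SELECT col FROM t OPTION(LABEL = 'foo')"
# Round-trips unchanged for T-SQL, per the test above.
print(sqlglot.transpile(sql, read="tsql", write="tsql")[0])
# Databricks has no equivalent, so generation is flagged as unsupported.
try:
    sqlglot.transpile(sql, read="tsql", write="databricks", unsupported_level=ErrorLevel.RAISE)
except UnsupportedError as e:
    print("unsupported:", e)
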
def test_types(self):
self.validate_identity("CAST(x AS XML)")
self.validate_identity("CAST(x AS UNIQUEIDENTIFIER)")