
Adding upstream version 26.25.3.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-06-07 04:46:28 +02:00
parent bc7749846c
commit d9e621c994
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
83 changed files with 67317 additions and 67680 deletions


@@ -1234,7 +1234,7 @@ LANGUAGE js AS
"bigquery": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT64, struct_col_b STRING>)",
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b TEXT))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b VARCHAR))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT, struct_col_b STRING>)",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
},
)
@@ -1244,7 +1244,7 @@ LANGUAGE js AS
"bigquery": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT64, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)",
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a BIGINT, struct_col_b STRUCT(nested_col_a TEXT, nested_col_b TEXT)))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a BIGINT, struct_col_b ROW(nested_col_a VARCHAR, nested_col_b VARCHAR)))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a BIGINT, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: BIGINT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: BIGINT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
},
)
@@ -2616,3 +2616,14 @@ OPTIONS (
"snowflake": "WITH t1 AS (SELECT (SELECT ARRAY_AGG(OBJECT_CONSTRUCT('alias_x1', x1, 'x2', x2 /* test */)) FROM t2 WHERE x2 = 4) AS array_col) SELECT array_col[0].alias_x1, array_col[0].x2 FROM t1",
},
)
def test_avoid_generating_nested_comment(self):
sql = """
select
id,
foo,
-- bar, /* the thing */
from facts
"""
expected = "SELECT\n id,\n foo\n/* bar, /* the thing * / */\nFROM facts"
self.assertEqual(self.parse_one(sql).sql("bigquery", pretty=True), expected)
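(Illustrative only, not part of the diff: a minimal sketch of the same behavior through sqlglot's public API, assuming this upstream version is installed. The escaped inner "*/" matches the expected string asserted above.)

import sqlglot

sql = """
select
  id,
  foo,
  -- bar, /* the thing */
from facts
"""
# The line comment containing "/* the thing */" is carried over as a block comment;
# the generator breaks the inner "*/" apart so the emitted comment cannot
# terminate early and produce invalid SQL.
print(sqlglot.parse_one(sql, read="bigquery").sql(dialect="bigquery", pretty=True))
# SELECT
#   id,
#   foo
# /* bar, /* the thing * / */
# FROM facts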


@@ -2191,6 +2191,21 @@ class TestDialect(Validator):
"bigquery": "MOD(a, b + 1)",
},
)
self.validate_all(
"ARRAY_REMOVE(the_array, target)",
write={
"": "ARRAY_REMOVE(the_array, target)",
"clickhouse": "arrayFilter(_u -> _u <> target, the_array)",
"duckdb": "LIST_FILTER(the_array, _u -> _u <> target)",
"bigquery": "ARRAY(SELECT _u FROM UNNEST(the_array) AS _u WHERE _u <> target)",
"hive": "ARRAY_REMOVE(the_array, target)",
"postgres": "ARRAY_REMOVE(the_array, target)",
"presto": "ARRAY_REMOVE(the_array, target)",
"starrocks": "ARRAY_REMOVE(the_array, target)",
"databricks": "ARRAY_REMOVE(the_array, target)",
"snowflake": "ARRAY_REMOVE(the_array, target)",
},
)
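(Illustrative only, not part of the diff: a minimal sketch of the ARRAY_REMOVE fallbacks above via sqlglot's public transpile() API, assuming this upstream version is installed.)

import sqlglot

sql = "ARRAY_REMOVE(the_array, target)"
# Dialects without a native ARRAY_REMOVE get a rewrite, per the expectations above.
print(sqlglot.transpile(sql, write="duckdb")[0])
# LIST_FILTER(the_array, _u -> _u <> target)
print(sqlglot.transpile(sql, write="bigquery")[0])
# ARRAY(SELECT _u FROM UNNEST(the_array) AS _u WHERE _u <> target)
print(sqlglot.transpile(sql, write="postgres")[0])
# ARRAY_REMOVE(the_array, target)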
def test_typeddiv(self):
typed_div = exp.Div(this=exp.column("a"), expression=exp.column("b"), typed=True)
@@ -3453,3 +3468,63 @@ FROM subquery2""",
parse_one("SELECT 0xCC", read=read_dialect).sql(other_integer_dialects),
"SELECT 0xCC",
)
def test_pipe_syntax(self):
self.validate_identity("FROM x", "SELECT * FROM x")
self.validate_identity("FROM x |> SELECT x1, x2", "SELECT x1, x2 FROM (SELECT * FROM x)")
self.validate_identity(
"FROM x |> SELECT x1 as c1, x2 as c2",
"SELECT x1 AS c1, x2 AS c2 FROM (SELECT * FROM x)",
)
self.validate_identity(
"FROM x |> SELECT x1 + 1 as x1_a, x2 - 1 as x2_a |> WHERE x1_a > 1",
"SELECT x1 + 1 AS x1_a, x2 - 1 AS x2_a FROM (SELECT * FROM x) WHERE x1_a > 1",
)
self.validate_identity(
"FROM x |> SELECT x1 + 1 as x1_a, x2 - 1 as x2_a |> WHERE x1_a > 1 |> SELECT x2_a",
"SELECT x2_a FROM (SELECT x1 + 1 AS x1_a, x2 - 1 AS x2_a FROM (SELECT * FROM x) WHERE x1_a > 1)",
)
self.validate_identity(
"FROM x |> WHERE x1 > 0 OR x2 > 0 |> WHERE x3 > 1 AND x4 > 1 |> SELECT x1, x4",
"SELECT x1, x4 FROM (SELECT * FROM x WHERE (x1 > 0 OR x2 > 0) AND (x3 > 1 AND x4 > 1))",
)
self.validate_identity(
"FROM x |> WHERE x1 > 1 |> WHERE x2 > 2 |> SELECT x1 as gt1, x2 as gt2",
"SELECT x1 AS gt1, x2 AS gt2 FROM (SELECT * FROM x WHERE x1 > 1 AND x2 > 2)",
)
self.validate_identity(
"FROM x |> WHERE x1 > 1 AND x2 > 2 |> SELECT x1 as gt1, x2 as gt2 |> SELECT gt1 * 2 + gt2 * 2 AS gt2_2",
"SELECT gt1 * 2 + gt2 * 2 AS gt2_2 FROM (SELECT x1 AS gt1, x2 AS gt2 FROM (SELECT * FROM x WHERE x1 > 1 AND x2 > 2))",
)
self.validate_identity("FROM x |> ORDER BY x1", "SELECT * FROM x ORDER BY x1")
self.validate_identity(
"FROM x |> ORDER BY x1 |> ORDER BY x2", "SELECT * FROM x ORDER BY x1, x2"
)
self.validate_identity(
"FROM x |> ORDER BY x1 |> WHERE x1 > 0 OR x1 != 1 |> ORDER BY x2 |> WHERE x2 > 0 AND x2 != 1 |> SELECT x1, x2",
"SELECT x1, x2 FROM (SELECT * FROM x WHERE (x1 > 0 OR x1 <> 1) AND (x2 > 0 AND x2 <> 1) ORDER BY x1, x2)",
)
self.validate_identity(
"FROM x |> ORDER BY x1 |> WHERE x1 > 0 |> SELECT x1",
"SELECT x1 FROM (SELECT * FROM x WHERE x1 > 0 ORDER BY x1)",
)
self.validate_identity(
"FROM x |> WHERE x1 > 0 |> SELECT x1 |> ORDER BY x1",
"SELECT x1 FROM (SELECT * FROM x WHERE x1 > 0) ORDER BY x1",
)
self.validate_identity(
"FROM x |> SELECT x1, x2, x3 |> ORDER BY x1 DESC NULLS FIRST, x2 ASC NULLS LAST, x3",
"SELECT x1, x2, x3 FROM (SELECT * FROM x) ORDER BY x1 DESC NULLS FIRST, x2 ASC NULLS LAST, x3",
)
for option in ("LIMIT 1", "OFFSET 2", "LIMIT 1 OFFSET 2"):
with self.subTest(f"Testing pipe syntax LIMIT and OFFSET option: {option}"):
self.validate_identity(f"FROM x |> {option}", f"SELECT * FROM x {option}")
self.validate_identity(f"FROM x |> {option}", f"SELECT * FROM x {option}")
self.validate_identity(
f"FROM x |> {option} |> SELECT x1, x2 |> WHERE x1 > 0 |> WHERE x2 > 0 |> ORDER BY x1, x2 ",
f"SELECT x1, x2 FROM (SELECT * FROM x {option}) WHERE x1 > 0 AND x2 > 0 ORDER BY x1, x2",
)
self.validate_identity(
f"FROM x |> SELECT x1, x2 |> WHERE x1 > 0 |> WHERE x2 > 0 |> ORDER BY x1, x2 |> {option}",
f"SELECT x1, x2 FROM (SELECT * FROM x) WHERE x1 > 0 AND x2 > 0 ORDER BY x1, x2 {option}",
)
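(Illustrative only, not part of the diff: a minimal sketch of the pipe-syntax rewrite through sqlglot's public API, assuming this upstream version is installed. The input/output pairs are taken verbatim from the assertions above.)

import sqlglot

# Each |> stage is folded into standard SQL, nesting a subquery where needed.
print(sqlglot.transpile("FROM x |> SELECT x1, x2")[0])
# SELECT x1, x2 FROM (SELECT * FROM x)
print(sqlglot.transpile("FROM x |> WHERE x1 > 1 |> WHERE x2 > 2 |> SELECT x1 as gt1, x2 as gt2")[0])
# SELECT x1 AS gt1, x2 AS gt2 FROM (SELECT * FROM x WHERE x1 > 1 AND x2 > 2)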


@@ -568,6 +568,9 @@ class TestDuckDB(Validator):
)
self.validate_all(
"STRING_TO_ARRAY(x, 'a')",
read={
"snowflake": "STRTOK_TO_ARRAY(x, 'a')",
},
write={
"duckdb": "STR_SPLIT(x, 'a')",
"presto": "SPLIT(x, 'a')",


@@ -184,6 +184,28 @@ class TestHive(Validator):
self.validate_identity(
"ALTER VIEW v1 UNSET TBLPROPERTIES ('tblp1', 'tblp2')", check_command_warning=True
)
self.validate_identity("CREATE TABLE foo (col STRUCT<struct_col_a: VARCHAR((50))>)")
self.validate_all(
"CREATE TABLE db.example_table (col_a struct<struct_col_a:int, struct_col_b:string>)",
write={
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b TEXT))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b VARCHAR))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
},
)
self.validate_all(
"CREATE TABLE db.example_table (col_a struct<struct_col_a:int, struct_col_b:struct<nested_col_a:string, nested_col_b:string>>)",
write={
"bigquery": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT64, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)",
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b STRUCT(nested_col_a TEXT, nested_col_b TEXT)))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b ROW(nested_col_a VARCHAR, nested_col_b VARCHAR)))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
},
)
def test_lateral_view(self):
self.validate_all(


@@ -111,7 +111,7 @@ class TestMySQL(Validator):
)
self.validate_identity(
"CREATE TABLE test (ts TIMESTAMP, ts_tz TIMESTAMPTZ, ts_ltz TIMESTAMPLTZ)",
"CREATE TABLE test (ts DATETIME, ts_tz TIMESTAMP, ts_ltz TIMESTAMP)",
"CREATE TABLE test (ts TIMESTAMP, ts_tz TIMESTAMP, ts_ltz TIMESTAMP)",
)
self.validate_identity(
"ALTER TABLE test_table ALTER COLUMN test_column SET DATA TYPE LONGTEXT",
@@ -298,7 +298,7 @@ class TestMySQL(Validator):
)
self.validate_identity(
"CAST(x AS TIMESTAMP)",
"CAST(x AS DATETIME)",
"TIMESTAMP(x)",
)
self.validate_identity(
"CAST(x AS TIMESTAMPTZ)",


@@ -8,19 +8,6 @@ class TestPostgres(Validator):
dialect = "postgres"
def test_postgres(self):
self.validate_all(
"x ? y",
write={
"": "JSONB_CONTAINS(x, y)",
"postgres": "x ? y",
},
)
self.validate_identity("SHA384(x)")
self.validate_identity("1.x", "1. AS x")
self.validate_identity("|/ x", "SQRT(x)")
self.validate_identity("||/ x", "CBRT(x)")
expr = self.parse_one("SELECT * FROM r CROSS JOIN LATERAL UNNEST(ARRAY[1]) AS s(location)")
unnest = expr.args["joins"][0].this.this
unnest.assert_is(exp.Unnest)
@@ -31,6 +18,14 @@ class TestPostgres(Validator):
self.assertIsInstance(expr, exp.Alter)
self.assertEqual(expr.sql(dialect="postgres"), alter_table_only)
sql = "ARRAY[x" + ",x" * 27 + "]"
expected_sql = "ARRAY[\n x" + (",\n x" * 27) + "\n]"
self.validate_identity(sql, expected_sql, pretty=True)
self.validate_identity("SHA384(x)")
self.validate_identity("1.x", "1. AS x")
self.validate_identity("|/ x", "SQRT(x)")
self.validate_identity("||/ x", "CBRT(x)")
self.validate_identity("SELECT EXTRACT(QUARTER FROM CAST('2025-04-26' AS DATE))")
self.validate_identity("SELECT DATE_TRUNC('QUARTER', CAST('2025-04-26' AS DATE))")
self.validate_identity("STRING_TO_ARRAY('xx~^~yy~^~zz', '~^~', 'yy')")
@@ -79,6 +74,11 @@ class TestPostgres(Validator):
self.validate_identity("SELECT CURRENT_USER")
self.validate_identity("SELECT * FROM ONLY t1")
self.validate_identity("SELECT INTERVAL '-1 MONTH'")
self.validate_identity("SELECT INTERVAL '4.1 DAY'")
self.validate_identity("SELECT INTERVAL '3.14159 HOUR'")
self.validate_identity("SELECT INTERVAL '2.5 MONTH'")
self.validate_identity("SELECT INTERVAL '-10.75 MINUTE'")
self.validate_identity("SELECT INTERVAL '0.123456789 SECOND'")
self.validate_identity(
"SELECT * FROM test_data, LATERAL JSONB_ARRAY_ELEMENTS(data) WITH ORDINALITY AS elem(value, ordinality)"
)
@@ -374,6 +374,13 @@ FROM json_data, field_ids""",
pretty=True,
)
self.validate_all(
"x ? y",
write={
"": "JSONB_CONTAINS(x, y)",
"postgres": "x ? y",
},
)
self.validate_all(
"SELECT CURRENT_TIMESTAMP + INTERVAL '-3 MONTH'",
read={
@@ -1050,6 +1057,9 @@ FROM json_data, field_ids""",
self.validate_identity(
"CREATE UNLOGGED TABLE foo AS WITH t(c) AS (SELECT 1) SELECT * FROM (SELECT c AS c FROM t) AS temp"
)
self.validate_identity(
"ALTER TABLE foo ADD COLUMN id BIGINT NOT NULL PRIMARY KEY DEFAULT 1, ADD CONSTRAINT fk_orders_user FOREIGN KEY (id) REFERENCES foo (id)"
)
self.validate_identity(
"CREATE TABLE t (col integer ARRAY[3])",
"CREATE TABLE t (col INT[3])",


@@ -511,7 +511,7 @@ class TestPresto(Validator):
write={
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b TEXT))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b VARCHAR))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT, struct_col_b STRING>)",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
},
)
@@ -520,7 +520,7 @@ class TestPresto(Validator):
write={
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b STRUCT(nested_col_a TEXT, nested_col_b TEXT)))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b ROW(nested_col_a VARCHAR, nested_col_b VARCHAR)))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
},
)


@@ -27,7 +27,7 @@ class TestSpark(Validator):
write={
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b TEXT))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b VARCHAR))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT, struct_col_b STRING>)",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
},
)
@@ -37,7 +37,7 @@ class TestSpark(Validator):
"bigquery": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT64, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)",
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b STRUCT(nested_col_a TEXT, nested_col_b TEXT)))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b ROW(nested_col_a VARCHAR, nested_col_b VARCHAR)))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
},
)


@@ -62,6 +62,13 @@ class TestTSQL(Validator):
"SELECT 1 WHERE EXISTS(SELECT 1)",
)
self.validate_all(
"SELECT CONVERT(DATETIME, '2006-04-25T15:50:59.997', 126)",
write={
"duckdb": "SELECT STRPTIME('2006-04-25T15:50:59.997', '%Y-%m-%dT%H:%M:%S.%f')",
"tsql": "SELECT CONVERT(DATETIME, '2006-04-25T15:50:59.997', 126)",
},
)
self.validate_all(
"WITH A AS (SELECT 2 AS value), C AS (SELECT * FROM A) SELECT * INTO TEMP_NESTED_WITH FROM (SELECT * FROM C) AS temp",
read={
@@ -569,6 +576,79 @@ class TestTSQL(Validator):
},
)
def test_for_xml(self):
xml_possible_options = [
"RAW('ElementName')",
"RAW('ElementName'), BINARY BASE64",
"RAW('ElementName'), TYPE",
"RAW('ElementName'), ROOT('RootName')",
"RAW('ElementName'), BINARY BASE64, TYPE",
"RAW('ElementName'), BINARY BASE64, ROOT('RootName')",
"RAW('ElementName'), TYPE, ROOT('RootName')",
"RAW('ElementName'), BINARY BASE64, TYPE, ROOT('RootName')",
"RAW('ElementName'), XMLDATA",
"RAW('ElementName'), XMLSCHEMA('TargetNameSpaceURI')",
"RAW('ElementName'), XMLDATA, ELEMENTS XSINIL",
"RAW('ElementName'), XMLSCHEMA('TargetNameSpaceURI'), ELEMENTS ABSENT",
"RAW('ElementName'), XMLDATA, ELEMENTS ABSENT",
"RAW('ElementName'), XMLSCHEMA('TargetNameSpaceURI'), ELEMENTS XSINIL",
"AUTO",
"AUTO, BINARY BASE64",
"AUTO, TYPE",
"AUTO, ROOT('RootName')",
"AUTO, BINARY BASE64, TYPE",
"AUTO, TYPE, ROOT('RootName')",
"AUTO, BINARY BASE64, TYPE, ROOT('RootName')",
"AUTO, XMLDATA",
"AUTO, XMLSCHEMA('TargetNameSpaceURI')",
"AUTO, XMLDATA, ELEMENTS XSINIL",
"AUTO, XMLSCHEMA('TargetNameSpaceURI'), ELEMENTS ABSENT",
"AUTO, XMLDATA, ELEMENTS ABSENT",
"AUTO, XMLSCHEMA('TargetNameSpaceURI'), ELEMENTS XSINIL",
"EXPLICIT",
"EXPLICIT, BINARY BASE64",
"EXPLICIT, TYPE",
"EXPLICIT, ROOT('RootName')",
"EXPLICIT, BINARY BASE64, TYPE",
"EXPLICIT, TYPE, ROOT('RootName')",
"EXPLICIT, BINARY BASE64, TYPE, ROOT('RootName')",
"EXPLICIT, XMLDATA",
"EXPLICIT, XMLDATA, BINARY BASE64",
"EXPLICIT, XMLDATA, TYPE",
"EXPLICIT, XMLDATA, ROOT('RootName')",
"EXPLICIT, XMLDATA, BINARY BASE64, TYPE",
"EXPLICIT, XMLDATA, BINARY BASE64, TYPE, ROOT('RootName')",
"PATH('ElementName')",
"PATH('ElementName'), BINARY BASE64",
"PATH('ElementName'), TYPE",
"PATH('ElementName'), ROOT('RootName')",
"PATH('ElementName'), BINARY BASE64, TYPE",
"PATH('ElementName'), TYPE, ROOT('RootName')",
"PATH('ElementName'), BINARY BASE64, TYPE, ROOT('RootName')",
"PATH('ElementName'), ELEMENTS XSINIL",
"PATH('ElementName'), ELEMENTS ABSENT",
"PATH('ElementName'), BINARY BASE64, ELEMENTS XSINIL",
"PATH('ElementName'), TYPE, ELEMENTS ABSENT",
"PATH('ElementName'), ROOT('RootName'), ELEMENTS XSINIL",
"PATH('ElementName'), BINARY BASE64, TYPE, ROOT('RootName'), ELEMENTS ABSENT",
]
for xml_option in xml_possible_options:
with self.subTest(f"Testing FOR XML option: {xml_option}"):
self.validate_identity(f"SELECT * FROM t FOR XML {xml_option}")
self.validate_identity(
"SELECT * FROM t FOR XML PATH, BINARY BASE64, ELEMENTS XSINIL",
"""SELECT
*
FROM t
FOR XML
PATH,
BINARY BASE64,
ELEMENTS XSINIL""",
pretty=True,
)
def test_types(self):
self.validate_identity("CAST(x AS XML)")
self.validate_identity("CAST(x AS UNIQUEIDENTIFIER)")
@@ -904,18 +984,18 @@ class TestTSQL(Validator):
self.validate_identity("CREATE SCHEMA testSchema")
self.validate_identity("CREATE VIEW t AS WITH cte AS (SELECT 1 AS c) SELECT c FROM cte")
self.validate_identity("ALTER TABLE tbl SET SYSTEM_VERSIONING=OFF")
self.validate_identity("ALTER TABLE tbl SET FILESTREAM_ON = 'test'")
self.validate_identity("ALTER TABLE tbl SET DATA_DELETION=ON")
self.validate_identity("ALTER TABLE tbl SET DATA_DELETION=OFF")
self.validate_identity("ALTER TABLE tbl SET (SYSTEM_VERSIONING=OFF)")
self.validate_identity("ALTER TABLE tbl SET (FILESTREAM_ON = 'test')")
self.validate_identity("ALTER TABLE tbl SET (DATA_DELETION=ON)")
self.validate_identity("ALTER TABLE tbl SET (DATA_DELETION=OFF)")
self.validate_identity(
"ALTER TABLE tbl SET SYSTEM_VERSIONING=ON(HISTORY_TABLE=db.tbl, DATA_CONSISTENCY_CHECK=OFF, HISTORY_RETENTION_PERIOD=5 DAYS)"
"ALTER TABLE tbl SET (SYSTEM_VERSIONING=ON(HISTORY_TABLE=db.tbl, DATA_CONSISTENCY_CHECK=OFF, HISTORY_RETENTION_PERIOD=5 DAYS))"
)
self.validate_identity(
"ALTER TABLE tbl SET SYSTEM_VERSIONING=ON(HISTORY_TABLE=db.tbl, HISTORY_RETENTION_PERIOD=INFINITE)"
"ALTER TABLE tbl SET (SYSTEM_VERSIONING=ON(HISTORY_TABLE=db.tbl, HISTORY_RETENTION_PERIOD=INFINITE))"
)
self.validate_identity(
"ALTER TABLE tbl SET DATA_DELETION=ON(FILTER_COLUMN=col, RETENTION_PERIOD=5 MONTHS)"
"ALTER TABLE tbl SET (DATA_DELETION=ON(FILTER_COLUMN=col, RETENTION_PERIOD=5 MONTHS))"
)
self.validate_identity("ALTER VIEW v AS SELECT a, b, c, d FROM foo")