Merging upstream version 20.1.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent d4fe7bdb16
commit 90988d8258
127 changed files with 73384 additions and 73067 deletions
@@ -6,8 +6,22 @@ class TestTSQL(Validator):
     dialect = "tsql"

     def test_tsql(self):
+        # https://learn.microsoft.com/en-us/previous-versions/sql/sql-server-2008-r2/ms187879(v=sql.105)?redirectedfrom=MSDN
+        # tsql allows .. which means use the default schema
+        self.validate_identity("SELECT * FROM a..b")
+
+        self.validate_identity("SELECT CONCAT(column1, column2)")
+        self.validate_identity("SELECT TestSpecialChar.Test# FROM TestSpecialChar")
+        self.validate_identity("SELECT TestSpecialChar.Test@ FROM TestSpecialChar")
+        self.validate_identity("SELECT TestSpecialChar.Test$ FROM TestSpecialChar")
+        self.validate_identity("SELECT TestSpecialChar.Test_ FROM TestSpecialChar")
+        self.validate_identity("SELECT TOP (2 + 1) 1")
+        self.validate_identity("SELECT * FROM t WHERE NOT c", "SELECT * FROM t WHERE NOT c <> 0")
+        self.validate_identity("1 AND true", "1 <> 0 AND (1 = 1)")
+        self.validate_identity("CAST(x AS int) OR y", "CAST(x AS INTEGER) <> 0 OR y <> 0")
+
         self.validate_all(
-            "WITH t(c) AS (SELECT 1) SELECT * INTO foo FROM (SELECT c FROM t) AS temp",
+            "WITH t(c) AS (SELECT 1) SELECT * INTO foo FROM (SELECT c AS c FROM t) AS temp",
             read={
                 "duckdb": "CREATE TABLE foo AS WITH t(c) AS (SELECT 1) SELECT c FROM t",
             },
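Note: the new identities above pin down T-SQL's implicit boolean-to-integer coercion. A quick spot check outside the test harness, assuming only that this release of the sqlglot package is installed:

import sqlglot

# Bare predicates are rewritten to explicit <> 0 comparisons when
# round-tripping through the tsql dialect, per the identities above.
print(sqlglot.transpile("SELECT * FROM t WHERE NOT c", read="tsql", write="tsql")[0])
# SELECT * FROM t WHERE NOT c <> 0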
@@ -25,7 +39,7 @@ class TestTSQL(Validator):
             },
         )
         self.validate_all(
-            "WITH t(c) AS (SELECT 1) MERGE INTO x AS z USING (SELECT c FROM t) AS y ON a = b WHEN MATCHED THEN UPDATE SET a = y.b",
+            "WITH t(c) AS (SELECT 1) MERGE INTO x AS z USING (SELECT c AS c FROM t) AS y ON a = b WHEN MATCHED THEN UPDATE SET a = y.b",
             read={
                 "postgres": "MERGE INTO x AS z USING (WITH t(c) AS (SELECT 1) SELECT c FROM t) AS y ON a = b WHEN MATCHED THEN UPDATE SET a = y.b",
             },
@@ -166,18 +180,6 @@ class TestTSQL(Validator):
             "SELECT DISTINCT DepartmentName, PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY BaseRate) OVER (PARTITION BY DepartmentName) AS MedianCont FROM dbo.DimEmployee"
         )

-        self.validate_all(
-            "SELECT DATEPART(year, CAST('2017-01-01' AS DATE))",
-            read={"postgres": "SELECT DATE_PART('year', '2017-01-01'::DATE)"},
-        )
-        self.validate_all(
-            "SELECT DATEPART(month, CAST('2017-03-01' AS DATE))",
-            read={"postgres": "SELECT DATE_PART('month', '2017-03-01'::DATE)"},
-        )
-        self.validate_all(
-            "SELECT DATEPART(day, CAST('2017-01-02' AS DATE))",
-            read={"postgres": "SELECT DATE_PART('day', '2017-01-02'::DATE)"},
-        )
         self.validate_all(
             "SELECT CAST([a].[b] AS SMALLINT) FROM foo",
             write={
@@ -229,11 +231,13 @@ class TestTSQL(Validator):
         self.validate_all(
             "HASHBYTES('SHA1', x)",
             read={
                 "snowflake": "SHA1(x)",
                 "spark": "SHA(x)",
             },
             write={
-                "tsql": "HASHBYTES('SHA1', x)",
+                "snowflake": "SHA1(x)",
+                "spark": "SHA(x)",
+                "tsql": "HASHBYTES('SHA1', x)",
             },
         )
         self.validate_all(
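Note: the widened write expectations mean the hash mapping now holds in both directions. A minimal sketch via sqlglot's documented transpile entry point, with outputs copied from the test:

import sqlglot

# Snowflake's SHA1 and Spark's SHA should both land on HASHBYTES in T-SQL.
print(sqlglot.transpile("SHA1(x)", read="snowflake", write="tsql")[0])  # HASHBYTES('SHA1', x)
print(sqlglot.transpile("SHA(x)", read="spark", write="tsql")[0])       # HASHBYTES('SHA1', x)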
@@ -561,6 +565,21 @@ class TestTSQL(Validator):
         )

     def test_ddl(self):
+        expression = parse_one("ALTER TABLE dbo.DocExe DROP CONSTRAINT FK_Column_B", dialect="tsql")
+        self.assertIsInstance(expression, exp.AlterTable)
+        self.assertIsInstance(expression.args["actions"][0], exp.Drop)
+        self.assertEqual(
+            expression.sql(dialect="tsql"), "ALTER TABLE dbo.DocExe DROP CONSTRAINT FK_Column_B"
+        )
+
+        for clusterd_keyword in ("CLUSTERED", "NONCLUSTERED"):
+            self.validate_identity(
+                'CREATE TABLE "dbo"."benchmark" ('
+                '"name" CHAR(7) NOT NULL, '
+                '"internal_id" VARCHAR(10) NOT NULL, '
+                f'UNIQUE {clusterd_keyword} ("internal_id" ASC))'
+            )
+
         self.validate_identity(
             "CREATE PROCEDURE foo AS BEGIN DELETE FROM bla WHERE foo < CURRENT_TIMESTAMP - 7 END",
             "CREATE PROCEDURE foo AS BEGIN DELETE FROM bla WHERE foo < GETDATE() - 7 END",
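Note: the new test_ddl body reduces to a standalone round-trip check; this mirrors the diff exactly, using only API the hunk itself imports:

import sqlglot
from sqlglot import exp

# DROP CONSTRAINT parses into an AlterTable node carrying a Drop action,
# and regenerates unchanged for tsql.
e = sqlglot.parse_one("ALTER TABLE dbo.DocExe DROP CONSTRAINT FK_Column_B", dialect="tsql")
assert isinstance(e, exp.AlterTable)
assert isinstance(e.args["actions"][0], exp.Drop)
print(e.sql(dialect="tsql"))  # ALTER TABLE dbo.DocExe DROP CONSTRAINT FK_Column_B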
@@ -588,6 +607,12 @@ class TestTSQL(Validator):
                 "": "CREATE TABLE foo.bar.baz AS SELECT * FROM a.b.c",
             },
         )
+        self.validate_all(
+            "SELECT * INTO foo.bar.baz FROM (SELECT * FROM a.b.c) AS temp",
+            read={
+                "": "CREATE TABLE foo.bar.baz AS (SELECT * FROM a.b.c)",
+            },
+        )
         self.validate_all(
             "IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id('db.tbl') AND name = 'idx') EXEC('CREATE INDEX idx ON db.tbl')",
             read={
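Note: the added case covers parenthesized CTAS. A sketch of the same assertion, assuming the generic ("" ) dialect as the read side, per the test:

import sqlglot

# Generic CTAS renders as SELECT ... INTO for tsql; the derived table
# receives the auto-generated alias the test expects.
print(sqlglot.transpile("CREATE TABLE foo.bar.baz AS (SELECT * FROM a.b.c)", write="tsql")[0])
# SELECT * INTO foo.bar.baz FROM (SELECT * FROM a.b.c) AS temp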
@@ -622,7 +647,6 @@ class TestTSQL(Validator):
                 "tsql": "CREATE OR ALTER VIEW a.b AS SELECT 1",
             },
         )
-
         self.validate_all(
             "ALTER TABLE a ADD b INTEGER, c INTEGER",
             read={
@@ -633,7 +657,6 @@ class TestTSQL(Validator):
                 "tsql": "ALTER TABLE a ADD b INTEGER, c INTEGER",
             },
         )
-
         self.validate_all(
             "CREATE TABLE #mytemp (a INTEGER, b CHAR(2), c TIME(4), d FLOAT(24))",
             write={
@@ -833,7 +856,7 @@ WHERE
         )

     def test_len(self):
-        self.validate_all("LEN(x)", write={"spark": "LENGTH(x)"})
+        self.validate_all("LEN(x)", read={"": "LENGTH(x)"}, write={"spark": "LENGTH(x)"})

     def test_replicate(self):
         self.validate_all("REPLICATE('x', 2)", write={"spark": "REPEAT('x', 2)"})
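Note: the added read entry asserts the reverse direction too. A quick sketch, with expected outputs taken from the test:

import sqlglot

# tsql LEN maps to LENGTH elsewhere, and generic LENGTH renders back as LEN.
print(sqlglot.transpile("LEN(x)", read="tsql", write="spark")[0])  # LENGTH(x)
print(sqlglot.transpile("LENGTH(x)", write="tsql")[0])             # LEN(x)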
@@ -870,11 +893,68 @@ WHERE
         )

     def test_datepart(self):
+        self.validate_identity(
+            "DATEPART(QUARTER, x)",
+            "DATEPART(quarter, CAST(x AS DATETIME2))",
+        )
+        self.validate_identity(
+            "DATEPART(YEAR, x)",
+            "FORMAT(CAST(x AS DATETIME2), 'yyyy')",
+        )
+        self.validate_identity(
+            "DATEPART(HOUR, date_and_time)",
+            "DATEPART(hour, CAST(date_and_time AS DATETIME2))",
+        )
+        self.validate_identity(
+            "DATEPART(WEEKDAY, date_and_time)",
+            "DATEPART(dw, CAST(date_and_time AS DATETIME2))",
+        )
+        self.validate_identity(
+            "DATEPART(DW, date_and_time)",
+            "DATEPART(dw, CAST(date_and_time AS DATETIME2))",
+        )
+
         self.validate_all(
             "SELECT DATEPART(month,'1970-01-01')",
-            write={"spark": "SELECT DATE_FORMAT(CAST('1970-01-01' AS TIMESTAMP), 'MM')"},
+            write={
+                "postgres": "SELECT TO_CHAR(CAST('1970-01-01' AS TIMESTAMP), 'MM')",
+                "spark": "SELECT DATE_FORMAT(CAST('1970-01-01' AS TIMESTAMP), 'MM')",
+                "tsql": "SELECT FORMAT(CAST('1970-01-01' AS DATETIME2), 'MM')",
+            },
         )
+        self.validate_all(
+            "SELECT DATEPART(year, CAST('2017-01-01' AS DATE))",
+            read={
+                "postgres": "SELECT DATE_PART('year', '2017-01-01'::DATE)",
+            },
+            write={
+                "postgres": "SELECT TO_CHAR(CAST(CAST('2017-01-01' AS DATE) AS TIMESTAMP), 'YYYY')",
+                "spark": "SELECT DATE_FORMAT(CAST(CAST('2017-01-01' AS DATE) AS TIMESTAMP), 'yyyy')",
+                "tsql": "SELECT FORMAT(CAST(CAST('2017-01-01' AS DATE) AS DATETIME2), 'yyyy')",
+            },
+        )
+        self.validate_all(
+            "SELECT DATEPART(month, CAST('2017-03-01' AS DATE))",
+            read={
+                "postgres": "SELECT DATE_PART('month', '2017-03-01'::DATE)",
+            },
+            write={
+                "postgres": "SELECT TO_CHAR(CAST(CAST('2017-03-01' AS DATE) AS TIMESTAMP), 'MM')",
+                "spark": "SELECT DATE_FORMAT(CAST(CAST('2017-03-01' AS DATE) AS TIMESTAMP), 'MM')",
+                "tsql": "SELECT FORMAT(CAST(CAST('2017-03-01' AS DATE) AS DATETIME2), 'MM')",
+            },
+        )
+        self.validate_all(
+            "SELECT DATEPART(day, CAST('2017-01-02' AS DATE))",
+            read={
+                "postgres": "SELECT DATE_PART('day', '2017-01-02'::DATE)",
+            },
+            write={
+                "postgres": "SELECT TO_CHAR(CAST(CAST('2017-01-02' AS DATE) AS TIMESTAMP), 'DD')",
+                "spark": "SELECT DATE_FORMAT(CAST(CAST('2017-01-02' AS DATE) AS TIMESTAMP), 'dd')",
+                "tsql": "SELECT FORMAT(CAST(CAST('2017-01-02' AS DATE) AS DATETIME2), 'dd')",
+            },
+        )
-        self.validate_identity("DATEPART(YEAR, x)", "FORMAT(CAST(x AS DATETIME2), 'yyyy')")

     def test_convert_date_format(self):
         self.validate_all(
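Note: the read-only DATEPART cases removed earlier in the file reappear here with write expectations for all three dialects. A sketch of the fan-out, with outputs as the test states them:

import sqlglot

# One DATEPART expression renders differently per target dialect:
# TO_CHAR for postgres, DATE_FORMAT for spark, FORMAT for tsql.
sql = "SELECT DATEPART(year, CAST('2017-01-01' AS DATE))"
for dialect in ("postgres", "spark", "tsql"):
    print(dialect, "->", sqlglot.transpile(sql, read="tsql", write=dialect)[0])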
@@ -1073,10 +1153,7 @@ WHERE

     def test_date_diff(self):
+        self.validate_identity("SELECT DATEDIFF(hour, 1.5, '2021-01-01')")
-        self.validate_identity(
-            "SELECT DATEDIFF(year, '2020-01-01', '2021-01-01')",
-            "SELECT DATEDIFF(year, CAST('2020-01-01' AS DATETIME2), CAST('2021-01-01' AS DATETIME2))",
-        )

         self.validate_all(
             "SELECT DATEDIFF(quarter, 0, '2021-01-01')",
             write={
@@ -1098,7 +1175,7 @@ WHERE
             write={
                 "tsql": "SELECT DATEDIFF(year, CAST('2020-01-01' AS DATETIME2), CAST('2021-01-01' AS DATETIME2))",
                 "spark": "SELECT DATEDIFF(year, CAST('2020-01-01' AS TIMESTAMP), CAST('2021-01-01' AS TIMESTAMP))",
-                "spark2": "SELECT CAST(MONTHS_BETWEEN(CAST('2021-01-01' AS TIMESTAMP), CAST('2020-01-01' AS TIMESTAMP)) AS INT) / 12",
+                "spark2": "SELECT CAST(MONTHS_BETWEEN(CAST('2021-01-01' AS TIMESTAMP), CAST('2020-01-01' AS TIMESTAMP)) / 12 AS INT)",
             },
         )
         self.validate_all(
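Note: the spark2 change moves the division inside the CAST. The ordering matters because Spark's / is floating-point division, so casting before dividing can leave a fraction behind. A plain-Python sketch of the two orderings (the month value is illustrative only):

# DATEDIFF is expected to return a whole count, so truncation must happen last.
months = 18.0            # e.g. MONTHS_BETWEEN over an 18-month span
old = int(months) / 12   # 1.5 -- old expectation: CAST(... AS INT) / 12
new = int(months / 12)   # 1   -- new expectation: CAST(... / 12 AS INT)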
@@ -1114,16 +1191,18 @@ WHERE
             write={
                 "databricks": "SELECT DATEDIFF(quarter, CAST('start' AS TIMESTAMP), CAST('end' AS TIMESTAMP))",
                 "spark": "SELECT DATEDIFF(quarter, CAST('start' AS TIMESTAMP), CAST('end' AS TIMESTAMP))",
-                "spark2": "SELECT CAST(MONTHS_BETWEEN(CAST('end' AS TIMESTAMP), CAST('start' AS TIMESTAMP)) AS INT) / 3",
+                "spark2": "SELECT CAST(MONTHS_BETWEEN(CAST('end' AS TIMESTAMP), CAST('start' AS TIMESTAMP)) / 3 AS INT)",
                 "tsql": "SELECT DATEDIFF(quarter, CAST('start' AS DATETIME2), CAST('end' AS DATETIME2))",
             },
         )

     def test_iif(self):
         self.validate_identity(
-            "SELECT IF(cond, 'True', 'False')", "SELECT IIF(cond, 'True', 'False')"
+            "SELECT IF(cond, 'True', 'False')", "SELECT IIF(cond <> 0, 'True', 'False')"
         )
+        self.validate_identity(
+            "SELECT IIF(cond, 'True', 'False')", "SELECT IIF(cond <> 0, 'True', 'False')"
+        )
-        self.validate_identity("SELECT IIF(cond, 'True', 'False')")
         self.validate_all(
             "SELECT IIF(cond, 'True', 'False');",
             write={
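Note: IF and IIF now both pick up the explicit <> 0 condition, consistent with the boolean-coercion identities added to test_tsql. A quick check, with the expected output copied from the test:

import sqlglot

# The condition of IF/IIF is normalized to an explicit integer comparison.
print(sqlglot.transpile("SELECT IF(cond, 'True', 'False')", read="tsql", write="tsql")[0])
# SELECT IIF(cond <> 0, 'True', 'False')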
@@ -1173,9 +1252,14 @@ WHERE

     def test_top(self):
         self.validate_all(
-            "SELECT TOP 3 * FROM A",
+            "SELECT DISTINCT TOP 3 * FROM A",
+            read={
+                "spark": "SELECT DISTINCT * FROM A LIMIT 3",
+            },
             write={
-                "spark": "SELECT * FROM A LIMIT 3",
+                "spark": "SELECT DISTINCT * FROM A LIMIT 3",
+                "teradata": "SELECT DISTINCT TOP 3 * FROM A",
+                "tsql": "SELECT DISTINCT TOP 3 * FROM A",
             },
         )
         self.validate_all(
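Note: the reworked case checks that DISTINCT survives the TOP/LIMIT translation in both directions. A sketch with the expectations as stated in the test:

import sqlglot

# Spark LIMIT becomes T-SQL TOP and back, with DISTINCT preserved.
print(sqlglot.transpile("SELECT DISTINCT * FROM A LIMIT 3", read="spark", write="tsql")[0])
# SELECT DISTINCT TOP 3 * FROM A
print(sqlglot.transpile("SELECT DISTINCT TOP 3 * FROM A", read="tsql", write="spark")[0])
# SELECT DISTINCT * FROM A LIMIT 3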
@@ -1292,6 +1376,26 @@ WHERE
             },
         )

+    def test_temporal_table(self):
+        self.validate_identity(
+            """CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON)"""
+        )
+        self.validate_identity(
+            """CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START HIDDEN NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END HIDDEN NOT NULL, PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE="dbo"."benchmark_history", DATA_CONSISTENCY_CHECK=ON))"""
+        )
+        self.validate_identity(
+            """CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE="dbo"."benchmark_history", DATA_CONSISTENCY_CHECK=ON))"""
+        )
+        self.validate_identity(
+            """CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE="dbo"."benchmark_history", DATA_CONSISTENCY_CHECK=OFF))"""
+        )
+        self.validate_identity(
+            """CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE="dbo"."benchmark_history"))"""
+        )
+        self.validate_identity(
+            """CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON(HISTORY_TABLE="dbo"."benchmark_history"))"""
+        )
+
     def test_system_time(self):
         self.validate_all(
             "SELECT [x] FROM [a].[b] FOR SYSTEM_TIME AS OF 'foo'",
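Note: validate_identity asserts an exact round trip, so the same check can be written directly against transpile; the DDL string below is copied from the first new case:

import sqlglot

ddl = (
    'CREATE TABLE test ("data" CHAR(7), "valid_from" DATETIME2(2) GENERATED ALWAYS AS ROW START '
    'NOT NULL, "valid_to" DATETIME2(2) GENERATED ALWAYS AS ROW END NOT NULL, '
    'PERIOD FOR SYSTEM_TIME ("valid_from", "valid_to")) WITH(SYSTEM_VERSIONING=ON)'
)
# System-versioned (temporal) table DDL should survive a tsql -> tsql round trip unchanged.
assert sqlglot.transpile(ddl, read="tsql", write="tsql")[0] == ddl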
@@ -1433,3 +1537,28 @@ FROM OPENJSON(@json) WITH (
                 "spark": "SET count = (SELECT COUNT(1) FROM x)",
             },
         )
+
+    def test_qualify_derived_table_outputs(self):
+        self.validate_identity(
+            "WITH t AS (SELECT 1) SELECT * FROM t",
+            'WITH t AS (SELECT 1 AS "1") SELECT * FROM t',
+        )
+        self.validate_identity(
+            'WITH t AS (SELECT "c") SELECT * FROM t',
+            'WITH t AS (SELECT "c" AS "c") SELECT * FROM t',
+        )
+        self.validate_identity(
+            "SELECT * FROM (SELECT 1) AS subq",
+            'SELECT * FROM (SELECT 1 AS "1") AS subq',
+        )
+        self.validate_identity(
+            'SELECT * FROM (SELECT "c") AS subq',
+            'SELECT * FROM (SELECT "c" AS "c") AS subq',
+        )
+
+        self.validate_all(
+            "WITH t1(c) AS (SELECT 1), t2 AS (SELECT CAST(c AS INTEGER) AS c FROM t1) SELECT * FROM t2",
+            read={
+                "duckdb": "WITH t1(c) AS (SELECT 1), t2 AS (SELECT CAST(c AS INTEGER) FROM t1) SELECT * FROM t2",
+            },
+        )
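Note: this new test method names the behavior behind the AS c changes in the earlier hunks: unaliased projections inside CTEs and derived tables now get explicit aliases when rendered for tsql. A quick check, with the expected output copied from the test:

import sqlglot

# A bare literal in a derived table is given an explicit quoted alias.
print(sqlglot.transpile("SELECT * FROM (SELECT 1) AS subq", read="tsql", write="tsql")[0])
# SELECT * FROM (SELECT 1 AS "1") AS subq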