
Adding upstream version 18.7.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 21:03:05 +01:00
parent c4fc25c23b
commit be16920347
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
96 changed files with 59037 additions and 52828 deletions

@@ -18,16 +18,28 @@ class TestTSQL(Validator):
             'CREATE TABLE x (CONSTRAINT "pk_mytable" UNIQUE NONCLUSTERED (a DESC)) ON b (c)'
         )
-        self.validate_identity(
+        self.validate_all(
             """
             CREATE TABLE x(
-            [zip_cd] [varchar](5) NULL NOT FOR REPLICATION
-            CONSTRAINT [pk_mytable] PRIMARY KEY CLUSTERED
-            ([zip_cd_mkey] ASC)
-            WITH (PAD_INDEX = ON, STATISTICS_NORECOMPUTE = OFF) ON [PRIMARY]
-            ) ON [PRIMARY]
+            [zip_cd] [varchar](5) NULL NOT FOR REPLICATION,
+            [zip_cd_mkey] [varchar](5) NOT NULL,
+            CONSTRAINT [pk_mytable] PRIMARY KEY CLUSTERED ([zip_cd_mkey] ASC)
+            WITH (PAD_INDEX = ON, STATISTICS_NORECOMPUTE = OFF) ON [INDEX]
+            ) ON [SECONDARY]
             """,
-            'CREATE TABLE x ("zip_cd" VARCHAR(5) NULL NOT FOR REPLICATION CONSTRAINT "pk_mytable" PRIMARY KEY CLUSTERED ("zip_cd_mkey") WITH (PAD_INDEX=ON, STATISTICS_NORECOMPUTE=OFF) ON "PRIMARY") ON "PRIMARY"',
+            write={
+                "tsql": 'CREATE TABLE x ("zip_cd" VARCHAR(5) NULL NOT FOR REPLICATION, "zip_cd_mkey" VARCHAR(5) NOT NULL, CONSTRAINT "pk_mytable" PRIMARY KEY CLUSTERED ("zip_cd_mkey" ASC) WITH (PAD_INDEX=ON, STATISTICS_NORECOMPUTE=OFF) ON "INDEX") ON "SECONDARY"',
+                "spark2": "CREATE TABLE x (`zip_cd` VARCHAR(5), `zip_cd_mkey` VARCHAR(5) NOT NULL, CONSTRAINT `pk_mytable` PRIMARY KEY (`zip_cd_mkey`))",
+            },
         )
-        self.validate_identity("CREATE TABLE x (A INTEGER NOT NULL, B INTEGER NULL)")
+        self.validate_all(
+            "CREATE TABLE x ( A INTEGER NOT NULL, B INTEGER NULL )",
+            write={
+                "hive": "CREATE TABLE x (A INT NOT NULL, B INT)",
+            },
+        )
         self.validate_identity(
@@ -123,10 +135,10 @@ class TestTSQL(Validator):
         self.validate_all(
             "STRING_AGG(x, '|') WITHIN GROUP (ORDER BY z ASC)",
             write={
-                "tsql": "STRING_AGG(x, '|') WITHIN GROUP (ORDER BY z)",
-                "mysql": "GROUP_CONCAT(x ORDER BY z SEPARATOR '|')",
+                "tsql": "STRING_AGG(x, '|') WITHIN GROUP (ORDER BY z ASC)",
+                "mysql": "GROUP_CONCAT(x ORDER BY z ASC SEPARATOR '|')",
                 "sqlite": "GROUP_CONCAT(x, '|')",
-                "postgres": "STRING_AGG(x, '|' ORDER BY z NULLS FIRST)",
+                "postgres": "STRING_AGG(x, '|' ORDER BY z ASC NULLS FIRST)",
             },
         )
         self.validate_all(
@@ -186,6 +198,7 @@ class TestTSQL(Validator):
             },
         )
         self.validate_identity("HASHBYTES('MD2', 'x')")
+        self.validate_identity("LOG(n, b)")
 
     def test_types(self):
         self.validate_identity("CAST(x AS XML)")
@@ -493,6 +506,12 @@ class TestTSQL(Validator):
                 "tsql": "CREATE TABLE tbl (id INTEGER NOT NULL IDENTITY(10, 1) PRIMARY KEY)",
             },
         )
+        self.validate_all(
+            "SELECT * INTO foo.bar.baz FROM (SELECT * FROM a.b.c) AS temp",
+            read={
+                "": "CREATE TABLE foo.bar.baz AS SELECT * FROM a.b.c",
+            },
+        )
         self.validate_all(
             "IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id('db.tbl') AND name = 'idx') EXEC('CREATE INDEX idx ON db.tbl')",
             read={
@@ -507,12 +526,17 @@ class TestTSQL(Validator):
             },
         )
         self.validate_all(
-            "IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE table_name = 'foo') EXEC('CREATE TABLE foo (a INTEGER)')",
+            "IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE table_name = 'baz' AND table_schema = 'bar' AND table_catalog = 'foo') EXEC('CREATE TABLE foo.bar.baz (a INTEGER)')",
             read={
-                "": "CREATE TABLE IF NOT EXISTS foo (a INTEGER)",
+                "": "CREATE TABLE IF NOT EXISTS foo.bar.baz (a INTEGER)",
             },
         )
+        self.validate_all(
+            "IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE table_name = 'baz' AND table_schema = 'bar' AND table_catalog = 'foo') EXEC('SELECT * INTO foo.bar.baz FROM (SELECT ''2020'' AS z FROM a.b.c) AS temp')",
+            read={
+                "": "CREATE TABLE IF NOT EXISTS foo.bar.baz AS SELECT '2020' AS z FROM a.b.c",
+            },
+        )
         self.validate_all(
             "CREATE OR ALTER VIEW a.b AS SELECT 1",
             read={
@@ -553,15 +577,11 @@ class TestTSQL(Validator):
                 "oracle": "CREATE TEMPORARY TABLE mytemptable (a NUMBER)",
             },
         )
+
+    def test_insert_cte(self):
         self.validate_all(
-            "CREATE TABLE #mytemptable AS SELECT a FROM Source_Table",
-            write={
-                "duckdb": "CREATE TEMPORARY TABLE mytemptable AS SELECT a FROM Source_Table",
-                "oracle": "CREATE TEMPORARY TABLE mytemptable AS SELECT a FROM Source_Table",
-                "snowflake": "CREATE TEMPORARY TABLE mytemptable AS SELECT a FROM Source_Table",
-                "spark": "CREATE TEMPORARY VIEW mytemptable AS SELECT a FROM Source_Table",
-                "tsql": "CREATE TABLE #mytemptable AS SELECT a FROM Source_Table",
-            },
+            "INSERT INTO foo.bar WITH cte AS (SELECT 1 AS one) SELECT * FROM cte",
+            write={"tsql": "WITH cte AS (SELECT 1 AS one) INSERT INTO foo.bar SELECT * FROM cte"},
         )
 
     def test_transaction(self):
@@ -709,18 +729,14 @@ WHERE
 SET @CurrentDate = CONVERT(VARCHAR(20), GETDATE(), 120);
 
 CREATE TABLE [target_schema].[target_table]
-WITH (DISTRIBUTION = REPLICATE, HEAP)
-AS
-SELECT
-@CurrentDate AS DWCreatedDate
-FROM source_schema.sourcetable;
+(a INTEGER)
+WITH (DISTRIBUTION = REPLICATE, HEAP);
 """
 
         expected_sqls = [
             'CREATE PROC "dbo"."transform_proc" AS DECLARE @CurrentDate VARCHAR(20)',
             "SET @CurrentDate = CAST(FORMAT(GETDATE(), 'yyyy-MM-dd HH:mm:ss') AS VARCHAR(20))",
-            'CREATE TABLE "target_schema"."target_table" WITH (DISTRIBUTION=REPLICATE, HEAP) AS SELECT @CurrentDate AS DWCreatedDate FROM source_schema.sourcetable',
+            'CREATE TABLE "target_schema"."target_table" (a INTEGER) WITH (DISTRIBUTION=REPLICATE, HEAP)',
         ]
 
         for expr, expected_sql in zip(parse(sql, read="tsql"), expected_sqls):
@@ -1178,6 +1194,16 @@ WHERE
         self.assertIsInstance(table.this, exp.Parameter)
         self.assertIsInstance(table.this.this, exp.Var)
 
+        self.validate_all(
+            "SELECT @x",
+            write={
+                "databricks": "SELECT ${x}",
+                "hive": "SELECT ${x}",
+                "spark": "SELECT ${x}",
+                "tsql": "SELECT @x",
+            },
+        )
+
     def test_temp_table(self):
         self.validate_all(
             "SELECT * FROM #mytemptable",
@@ -1319,3 +1345,21 @@ FROM OPENJSON(@json) WITH (
             },
             pretty=True,
         )
+
+    def test_set(self):
+        self.validate_all(
+            "SET KEY VALUE",
+            write={
+                "tsql": "SET KEY VALUE",
+                "duckdb": "SET KEY = VALUE",
+                "spark": "SET KEY = VALUE",
+            },
+        )
+        self.validate_all(
+            "SET @count = (SELECT COUNT(1) FROM x)",
+            write={
+                "databricks": "SET count = (SELECT COUNT(1) FROM x)",
+                "tsql": "SET @count = (SELECT COUNT(1) FROM x)",
+                "spark": "SET count = (SELECT COUNT(1) FROM x)",
+            },
+        )
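
For reference, a minimal sketch of reproducing two of the behaviors these updated tests assert, using sqlglot's public transpile API and assuming sqlglot 18.7.0 (the version added by this commit) is installed:

    # Sketch: exercise two conversions covered by the updated T-SQL tests.
    import sqlglot

    # T-SQL has no CREATE TABLE IF NOT EXISTS, so sqlglot emulates it with an
    # information_schema existence check wrapped around EXEC(...), as the new
    # test expects for the fully qualified table foo.bar.baz.
    print(sqlglot.transpile("CREATE TABLE IF NOT EXISTS foo.bar.baz (a INTEGER)", write="tsql")[0])

    # The updated STRING_AGG expectation carries an explicit ASC inside
    # WITHIN GROUP (ORDER BY ...) through to MySQL's GROUP_CONCAT.
    print(sqlglot.transpile("STRING_AGG(x, '|') WITHIN GROUP (ORDER BY z ASC)", read="tsql", write="mysql")[0])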