Merging upstream version 11.7.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent 0c053462ae
commit 8d96084fad
144 changed files with 44104 additions and 39367 deletions
@@ -66,6 +66,54 @@ class TestTSQL(Validator):
                 "postgres": "STRING_AGG(x, '|')",
             },
         )
+        self.validate_all(
+            "SELECT CAST([a].[b] AS SMALLINT) FROM foo",
+            write={
+                "tsql": 'SELECT CAST("a"."b" AS SMALLINT) FROM foo',
+                "spark": "SELECT CAST(`a`.`b` AS SHORT) FROM foo",
+            },
+        )
+        self.validate_all(
+            "HASHBYTES('SHA1', x)",
+            read={
+                "spark": "SHA(x)",
+            },
+            write={
+                "tsql": "HASHBYTES('SHA1', x)",
+                "spark": "SHA(x)",
+            },
+        )
+        self.validate_all(
+            "HASHBYTES('SHA2_256', x)",
+            read={
+                "spark": "SHA2(x, 256)",
+            },
+            write={
+                "tsql": "HASHBYTES('SHA2_256', x)",
+                "spark": "SHA2(x, 256)",
+            },
+        )
+        self.validate_all(
+            "HASHBYTES('SHA2_512', x)",
+            read={
+                "spark": "SHA2(x, 512)",
+            },
+            write={
+                "tsql": "HASHBYTES('SHA2_512', x)",
+                "spark": "SHA2(x, 512)",
+            },
+        )
+        self.validate_all(
+            "HASHBYTES('MD5', 'x')",
+            read={
+                "spark": "MD5('x')",
+            },
+            write={
+                "tsql": "HASHBYTES('MD5', 'x')",
+                "spark": "MD5('x')",
+            },
+        )
+        self.validate_identity("HASHBYTES('MD2', 'x')")

     def test_types(self):
         self.validate_identity("CAST(x AS XML)")
@@ -399,7 +447,7 @@ WHERE
         self.validate_all(
             "SELECT CONVERT(VARCHAR(10), testdb.dbo.test.x, 120) y FROM testdb.dbo.test",
             write={
-                "mysql": "SELECT CAST(TIME_TO_STR(testdb.dbo.test.x, '%Y-%m-%d %H:%M:%S') AS VARCHAR(10)) AS y FROM testdb.dbo.test",
+                "mysql": "SELECT CAST(DATE_FORMAT(testdb.dbo.test.x, '%Y-%m-%d %T') AS VARCHAR(10)) AS y FROM testdb.dbo.test",
                 "spark": "SELECT CAST(DATE_FORMAT(testdb.dbo.test.x, 'yyyy-MM-dd HH:mm:ss') AS VARCHAR(10)) AS y FROM testdb.dbo.test",
             },
         )
@@ -482,6 +530,12 @@ WHERE
                 "spark": "SELECT x.a, x.b, t.v, t.y FROM x LEFT JOIN LATERAL (SELECT v, y FROM t) AS t(v, y)",
             },
         )
+        self.validate_all(
+            "SELECT x.a, x.b, t.v, t.y, s.v, s.y FROM x OUTER APPLY (SELECT v, y FROM t) t(v, y) OUTER APPLY (SELECT v, y FROM t) s(v, y) LEFT JOIN z ON z.id = s.id",
+            write={
+                "spark": "SELECT x.a, x.b, t.v, t.y, s.v, s.y FROM x LEFT JOIN LATERAL (SELECT v, y FROM t) AS t(v, y) LEFT JOIN LATERAL (SELECT v, y FROM t) AS s(v, y) LEFT JOIN z ON z.id = s.id",
+            },
+        )

     def test_lateral_table_valued_function(self):
         self.validate_all(
@@ -631,3 +685,38 @@ WHERE
             "SUSER_SNAME()",
             write={"spark": "CURRENT_USER()"},
         )
+        self.validate_all(
+            "SYSTEM_USER()",
+            write={"spark": "CURRENT_USER()"},
+        )
+        self.validate_all(
+            "SYSTEM_USER",
+            write={"spark": "CURRENT_USER()"},
+        )
+
+    def test_hints(self):
+        self.validate_all(
+            "SELECT x FROM a INNER HASH JOIN b ON b.id = a.id",
+            write={"spark": "SELECT x FROM a INNER JOIN b ON b.id = a.id"},
+        )
+        self.validate_all(
+            "SELECT x FROM a INNER LOOP JOIN b ON b.id = a.id",
+            write={"spark": "SELECT x FROM a INNER JOIN b ON b.id = a.id"},
+        )
+        self.validate_all(
+            "SELECT x FROM a INNER REMOTE JOIN b ON b.id = a.id",
+            write={"spark": "SELECT x FROM a INNER JOIN b ON b.id = a.id"},
+        )
+        self.validate_all(
+            "SELECT x FROM a INNER MERGE JOIN b ON b.id = a.id",
+            write={"spark": "SELECT x FROM a INNER JOIN b ON b.id = a.id"},
+        )
+        self.validate_all(
+            "SELECT x FROM a WITH (NOLOCK)",
+            write={
+                "spark": "SELECT x FROM a",
+                "tsql": "SELECT x FROM a WITH (NOLOCK)",
+                "": "SELECT x FROM a WITH (NOLOCK)",
+            },
+        )
+        self.validate_identity("SELECT x FROM a INNER LOOP JOIN b ON b.id = a.id")
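
For reference, the behaviour the new assertions describe can be reproduced with sqlglot's public transpile() helper. The following is a minimal sketch, not part of the diff, assuming sqlglot 11.7.1 is installed; the expected outputs are taken from the read= and write= entries added above.

import sqlglot

# TSQL HASHBYTES('SHA2_256', x) generates Spark's SHA2(x, 256),
# matching the new HASHBYTES write= test case.
print(sqlglot.transpile("HASHBYTES('SHA2_256', x)", read="tsql", write="spark")[0])
# expected: SHA2(x, 256)

# Conversely, Spark's SHA2(x, 512) parses back into TSQL HASHBYTES,
# matching the read= side of the same test.
print(sqlglot.transpile("SHA2(x, 512)", read="spark", write="tsql")[0])
# expected: HASHBYTES('SHA2_512', x)

# TSQL table hints are dropped when generating Spark SQL, per the new test_hints cases.
print(sqlglot.transpile("SELECT x FROM a WITH (NOLOCK)", read="tsql", write="spark")[0])
# expected: SELECT x FROM a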