
Adding upstream version 9.0.6.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Author: Daniel Baumann, 2025-02-13 14:51:09 +01:00
Commit: 24752785d9 (parent: 40002be1ba)
Signed by: daniel (GPG key ID: FBB4F0E80A80222F)
31 changed files with 694 additions and 196 deletions


@@ -260,6 +260,20 @@ class TestTSQL(Validator):
                 "spark": "CAST(x AS INT)",
             },
         )
+        self.validate_all(
+            "SELECT CONVERT(VARCHAR(10), testdb.dbo.test.x, 120) y FROM testdb.dbo.test",
+            write={
+                "mysql": "SELECT CAST(TIME_TO_STR(testdb.dbo.test.x, '%Y-%m-%d %H:%M:%S') AS VARCHAR(10)) AS y FROM testdb.dbo.test",
+                "spark": "SELECT CAST(DATE_FORMAT(testdb.dbo.test.x, 'yyyy-MM-dd HH:mm:ss') AS VARCHAR(10)) AS y FROM testdb.dbo.test",
+            },
+        )
+        self.validate_all(
+            "SELECT CONVERT(VARCHAR(10), y.x) z FROM testdb.dbo.test y",
+            write={
+                "mysql": "SELECT CAST(y.x AS VARCHAR(10)) AS z FROM testdb.dbo.test AS y",
+                "spark": "SELECT CAST(y.x AS VARCHAR(10)) AS z FROM testdb.dbo.test AS y",
+            },
+        )

     def test_add_date(self):
         self.validate_identity("SELECT DATEADD(year, 1, '2017/08/25')")
@@ -267,7 +281,10 @@ class TestTSQL(Validator):
             "SELECT DATEADD(year, 1, '2017/08/25')", write={"spark": "SELECT ADD_MONTHS('2017/08/25', 12)"}
         )
         self.validate_all("SELECT DATEADD(qq, 1, '2017/08/25')", write={"spark": "SELECT ADD_MONTHS('2017/08/25', 3)"})
-        self.validate_all("SELECT DATEADD(wk, 1, '2017/08/25')", write={"spark": "SELECT DATE_ADD('2017/08/25', 7)"})
+        self.validate_all(
+            "SELECT DATEADD(wk, 1, '2017/08/25')",
+            write={"spark": "SELECT DATE_ADD('2017/08/25', 7)", "databricks": "SELECT DATEADD(week, 1, '2017/08/25')"},
+        )

     def test_date_diff(self):
         self.validate_identity("SELECT DATEDIFF(year, '2020/01/01', '2021/01/01')")
@@ -279,11 +296,19 @@ class TestTSQL(Validator):
             },
         )
         self.validate_all(
-            "SELECT DATEDIFF(month, 'start','end')",
-            write={"spark": "SELECT MONTHS_BETWEEN('end', 'start')", "tsql": "SELECT DATEDIFF(month, 'start', 'end')"},
+            "SELECT DATEDIFF(mm, 'start','end')",
+            write={
+                "spark": "SELECT MONTHS_BETWEEN('end', 'start')",
+                "tsql": "SELECT DATEDIFF(month, 'start', 'end')",
+                "databricks": "SELECT DATEDIFF(month, 'start', 'end')",
+            },
         )
         self.validate_all(
-            "SELECT DATEDIFF(quarter, 'start', 'end')", write={"spark": "SELECT MONTHS_BETWEEN('end', 'start') / 3"}
+            "SELECT DATEDIFF(quarter, 'start', 'end')",
+            write={
+                "spark": "SELECT MONTHS_BETWEEN('end', 'start') / 3",
+                "databricks": "SELECT DATEDIFF(quarter, 'start', 'end')",
+            },
         )

     def test_iif(self):
@@ -294,3 +319,64 @@ class TestTSQL(Validator):
                 "spark": "SELECT IF(cond, 'True', 'False')",
             },
         )
+
+    def test_lateral_subquery(self):
+        self.validate_all(
+            "SELECT x.a, x.b, t.v, t.y FROM x CROSS APPLY (SELECT v, y FROM t) t(v, y)",
+            write={
+                "spark": "SELECT x.a, x.b, t.v, t.y FROM x JOIN LATERAL (SELECT v, y FROM t) AS t(v, y)",
+            },
+        )
+        self.validate_all(
+            "SELECT x.a, x.b, t.v, t.y FROM x OUTER APPLY (SELECT v, y FROM t) t(v, y)",
+            write={
+                "spark": "SELECT x.a, x.b, t.v, t.y FROM x LEFT JOIN LATERAL (SELECT v, y FROM t) AS t(v, y)",
+            },
+        )
+
+    def test_lateral_table_valued_function(self):
+        self.validate_all(
+            "SELECT t.x, y.z FROM x CROSS APPLY tvfTest(t.x)y(z)",
+            write={
+                "spark": "SELECT t.x, y.z FROM x JOIN LATERAL TVFTEST(t.x) y AS z",
+            },
+        )
+        self.validate_all(
+            "SELECT t.x, y.z FROM x OUTER APPLY tvfTest(t.x)y(z)",
+            write={
+                "spark": "SELECT t.x, y.z FROM x LEFT JOIN LATERAL TVFTEST(t.x) y AS z",
+            },
+        )
+
+    def test_top(self):
+        self.validate_all(
+            "SELECT TOP 3 * FROM A",
+            write={
+                "spark": "SELECT * FROM A LIMIT 3",
+            },
+        )
+        self.validate_all(
+            "SELECT TOP (3) * FROM A",
+            write={
+                "spark": "SELECT * FROM A LIMIT 3",
+            },
+        )
+
+    def test_format(self):
+        self.validate_identity("SELECT FORMAT('01-01-1991', 'd.mm.yyyy')")
+        self.validate_identity("SELECT FORMAT(12345, '###.###.###')")
+        self.validate_identity("SELECT FORMAT(1234567, 'f')")
+        self.validate_all(
+            "SELECT FORMAT(1000000.01,'###,###.###')",
+            write={"spark": "SELECT FORMAT_NUMBER(1000000.01, '###,###.###')"},
+        )
+        self.validate_all("SELECT FORMAT(1234567, 'f')", write={"spark": "SELECT FORMAT_NUMBER(1234567, 'f')"})
+        self.validate_all(
+            "SELECT FORMAT('01-01-1991', 'dd.mm.yyyy')",
+            write={"spark": "SELECT DATE_FORMAT('01-01-1991', 'dd.mm.yyyy')"},
+        )
+        self.validate_all(
+            "SELECT FORMAT(date_col, 'dd.mm.yyyy')", write={"spark": "SELECT DATE_FORMAT(date_col, 'dd.mm.yyyy')"}
+        )
+        self.validate_all("SELECT FORMAT(date_col, 'm')", write={"spark": "SELECT DATE_FORMAT(date_col, 'MMMM d')"})
+        self.validate_all("SELECT FORMAT(num_col, 'c')", write={"spark": "SELECT FORMAT_NUMBER(num_col, 'c')"})
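
Note: the conversions these new tests assert can be reproduced with the library's public transpile() helper (the upstream project here appears to be sqlglot). A minimal sketch, assuming sqlglot 9.0.6 is installed; the expected outputs in the comments are copied from the test cases above:

    import sqlglot

    # T-SQL TOP is rewritten to a Spark LIMIT clause.
    print(sqlglot.transpile("SELECT TOP (3) * FROM A", read="tsql", write="spark")[0])
    # SELECT * FROM A LIMIT 3

    # The T-SQL "wk" unit abbreviation maps to a 7-day DATE_ADD in Spark.
    print(sqlglot.transpile("SELECT DATEADD(wk, 1, '2017/08/25')", read="tsql", write="spark")[0])
    # SELECT DATE_ADD('2017/08/25', 7)

    # For Databricks, DATEDIFF(quarter, ...) is preserved rather than rewritten.
    print(sqlglot.transpile("SELECT DATEDIFF(quarter, 'start', 'end')", read="tsql", write="databricks")[0])
    # SELECT DATEDIFF(quarter, 'start', 'end')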