Merging upstream version 21.0.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent 3759c601a7
commit 96b10de29a
115 changed files with 66603 additions and 60920 deletions
@@ -886,7 +886,7 @@ WHERE
             "END",
         ]

-        with self.assertLogs(parser_logger) as cm:
+        with self.assertLogs(parser_logger):
             for expr, expected_sql in zip(parse(sql, read="tsql"), expected_sqls):
                 self.assertEqual(expr.sql(dialect="tsql"), expected_sql)
@@ -907,7 +907,7 @@ WHERE
             "CREATE TABLE [target_schema].[target_table] (a INTEGER) WITH (DISTRIBUTION=REPLICATE, HEAP)",
         ]

-        with self.assertLogs(parser_logger) as cm:
+        with self.assertLogs(parser_logger):
             for expr, expected_sql in zip(parse(sql, read="tsql"), expected_sqls):
                 self.assertEqual(expr.sql(dialect="tsql"), expected_sql)
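Both hunks above make the same cleanup: the `as cm` binding on `assertLogs` was never used, so it is dropped. For reference, a minimal self-contained sketch of that pattern (hypothetical logger and test names, not from this repository):

import logging
import unittest

logger = logging.getLogger("example.parser")  # hypothetical stand-in for parser_logger


class AssertLogsPattern(unittest.TestCase):
    def test_logs_emitted(self):
        # assertLogs fails the test if no record is emitted; binding the
        # context manager (`as cm`) is only needed when the captured
        # records (cm.output) are inspected afterwards.
        with self.assertLogs(logger, level="WARNING"):
            logger.warning("recoverable parse issue")


if __name__ == "__main__":
    unittest.main()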
@@ -941,6 +941,16 @@ WHERE
         self.validate_all(
             "LEN(x)", read={"": "LENGTH(x)"}, write={"spark": "LENGTH(CAST(x AS STRING))"}
         )
+        self.validate_all(
+            "RIGHT(x, 1)",
+            read={"": "RIGHT(CAST(x AS STRING), 1)"},
+            write={"spark": "RIGHT(CAST(x AS STRING), 1)"},
+        )
+        self.validate_all(
+            "LEFT(x, 1)",
+            read={"": "LEFT(CAST(x AS STRING), 1)"},
+            write={"spark": "LEFT(CAST(x AS STRING), 1)"},
+        )
         self.validate_all("LEN(1)", write={"tsql": "LEN(1)", "spark": "LENGTH(CAST(1 AS STRING))"})
         self.validate_all("LEN('x')", write={"tsql": "LEN('x')", "spark": "LENGTH('x')"})
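The validate_all helper used throughout these tests parses its first argument in the test's dialect (tsql here) and asserts that each write target renders the expected string. A rough sketch of that contract via sqlglot's public API (check_transpile is an illustrative name, not the real helper; expected strings copied from the LEN test in the hunk above):

import sqlglot


def check_transpile(sql: str, read: str, write: dict) -> None:
    # Parse once in the source dialect, then assert each target rendering,
    # mirroring what the validate_all calls above verify.
    expression = sqlglot.parse_one(sql, read=read)
    for dialect, expected in write.items():
        actual = expression.sql(dialect=dialect)
        assert actual == expected, f"{dialect}: {actual!r} != {expected!r}"


check_transpile(
    "LEN(1)",
    read="tsql",
    write={"tsql": "LEN(1)", "spark": "LENGTH(CAST(1 AS STRING))"},
)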
@@ -950,10 +960,20 @@ WHERE
     def test_isnull(self):
         self.validate_all("ISNULL(x, y)", write={"spark": "COALESCE(x, y)"})

-    def test_jsonvalue(self):
+    def test_json(self):
+        self.validate_all(
+            "JSON_QUERY(r.JSON, '$.Attr_INT')",
+            write={
+                "spark": "GET_JSON_OBJECT(r.JSON, '$.Attr_INT')",
+                "tsql": "ISNULL(JSON_QUERY(r.JSON, '$.Attr_INT'), JSON_VALUE(r.JSON, '$.Attr_INT'))",
+            },
+        )
         self.validate_all(
             "JSON_VALUE(r.JSON, '$.Attr_INT')",
-            write={"spark": "GET_JSON_OBJECT(r.JSON, '$.Attr_INT')"},
+            write={
+                "spark": "GET_JSON_OBJECT(r.JSON, '$.Attr_INT')",
+                "tsql": "ISNULL(JSON_QUERY(r.JSON, '$.Attr_INT'), JSON_VALUE(r.JSON, '$.Attr_INT'))",
+            },
         )

     def test_datefromparts(self):
@@ -1438,7 +1458,7 @@ WHERE
                 "mysql": "LAST_DAY(DATE(CURRENT_TIMESTAMP()))",
                 "postgres": "CAST(DATE_TRUNC('MONTH', CAST(CURRENT_TIMESTAMP AS DATE)) + INTERVAL '1 MONTH' - INTERVAL '1 DAY' AS DATE)",
                 "presto": "LAST_DAY_OF_MONTH(CAST(CAST(CURRENT_TIMESTAMP AS TIMESTAMP) AS DATE))",
-                "redshift": "LAST_DAY(CAST(SYSDATE AS DATE))",
+                "redshift": "LAST_DAY(CAST(GETDATE() AS DATE))",
                 "snowflake": "LAST_DAY(CAST(CURRENT_TIMESTAMP() AS DATE))",
                 "spark": "LAST_DAY(TO_DATE(CURRENT_TIMESTAMP()))",
                 "tsql": "EOMONTH(CAST(GETDATE() AS DATE))",
@@ -1453,7 +1473,7 @@ WHERE
                 "mysql": "LAST_DAY(DATE_ADD(CURRENT_TIMESTAMP(), INTERVAL -1 MONTH))",
                 "postgres": "CAST(DATE_TRUNC('MONTH', CAST(CURRENT_TIMESTAMP AS DATE) + INTERVAL '-1 MONTH') + INTERVAL '1 MONTH' - INTERVAL '1 DAY' AS DATE)",
                 "presto": "LAST_DAY_OF_MONTH(DATE_ADD('MONTH', CAST(-1 AS BIGINT), CAST(CAST(CURRENT_TIMESTAMP AS TIMESTAMP) AS DATE)))",
-                "redshift": "LAST_DAY(DATEADD(MONTH, -1, CAST(SYSDATE AS DATE)))",
+                "redshift": "LAST_DAY(DATEADD(MONTH, -1, CAST(GETDATE() AS DATE)))",
                 "snowflake": "LAST_DAY(DATEADD(MONTH, -1, CAST(CURRENT_TIMESTAMP() AS DATE)))",
                 "spark": "LAST_DAY(ADD_MONTHS(TO_DATE(CURRENT_TIMESTAMP()), -1))",
                 "tsql": "EOMONTH(DATEADD(MONTH, -1, CAST(GETDATE() AS DATE)))",
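End to end, the same transpilations these tests assert can be reproduced through sqlglot's transpile function. A minimal sketch, with expected outputs copied from the tests above (assumes sqlglot 21.0.1 is installed):

import sqlglot

# T-SQL LEN over a non-string literal gains a CAST when rendered for Spark.
print(sqlglot.transpile("LEN(1)", read="tsql", write="spark")[0])
# LENGTH(CAST(1 AS STRING))

# T-SQL RIGHT is preserved, with its argument cast to STRING for Spark.
print(sqlglot.transpile("RIGHT(x, 1)", read="tsql", write="spark")[0])
# RIGHT(CAST(x AS STRING), 1)

# JSON_VALUE maps to Spark's GET_JSON_OBJECT.
print(sqlglot.transpile("JSON_VALUE(r.JSON, '$.Attr_INT')", read="tsql", write="spark")[0])
# GET_JSON_OBJECT(r.JSON, '$.Attr_INT')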