
Adding upstream version 25.20.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>

Author: Daniel Baumann <daniel@debian.org>
Date: 2025-02-13 21:53:56 +01:00
Parent: b35dbeb6b6
Commit: 0b78a18345
Signed by: daniel (GPG key ID: FBB4F0E80A80222F)
119 changed files with 78094 additions and 71498 deletions

@@ -420,6 +420,11 @@ class TestTSQL(Validator):
             "SELECT val FROM (VALUES ((TRUE), (FALSE), (NULL))) AS t(val)",
             write_sql="SELECT val FROM (VALUES ((1), (0), (NULL))) AS t(val)",
         )
+        self.validate_identity("'a' + 'b'")
+        self.validate_identity(
+            "'a' || 'b'",
+            "'a' + 'b'",
+        )
 
     def test_option(self):
         possible_options = [
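
The hunk above adds coverage for string concatenation in the T-SQL dialect: '+' round-trips unchanged, and the portable '||' operator is normalized to '+'. A minimal sketch of the behavior these assertions exercise, assuming sqlglot is importable (the snippet is illustrative and not part of the test file):

import sqlglot

# '||' parsed by the T-SQL dialect is written back with the '+' concatenation operator
print(sqlglot.transpile("SELECT 'a' || 'b'", read="tsql", write="tsql")[0])
# expected, per the new assertions: SELECT 'a' + 'b'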
@@ -1701,7 +1706,7 @@ WHERE
                 "duckdb": "LAST_DAY(CAST(CURRENT_TIMESTAMP AS DATE) + INTERVAL (-1) MONTH)",
                 "mysql": "LAST_DAY(DATE_ADD(CURRENT_TIMESTAMP(), INTERVAL -1 MONTH))",
                 "postgres": "CAST(DATE_TRUNC('MONTH', CAST(CURRENT_TIMESTAMP AS DATE) + INTERVAL '-1 MONTH') + INTERVAL '1 MONTH' - INTERVAL '1 DAY' AS DATE)",
-                "presto": "LAST_DAY_OF_MONTH(DATE_ADD('MONTH', CAST(-1 AS BIGINT), CAST(CAST(CURRENT_TIMESTAMP AS TIMESTAMP) AS DATE)))",
+                "presto": "LAST_DAY_OF_MONTH(DATE_ADD('MONTH', -1, CAST(CAST(CURRENT_TIMESTAMP AS TIMESTAMP) AS DATE)))",
                 "redshift": "LAST_DAY(DATEADD(MONTH, -1, CAST(GETDATE() AS DATE)))",
                 "snowflake": "LAST_DAY(DATEADD(MONTH, -1, TO_DATE(CURRENT_TIMESTAMP())))",
                 "spark": "LAST_DAY(ADD_MONTHS(TO_DATE(CURRENT_TIMESTAMP()), -1))",
@@ -1965,3 +1970,31 @@ FROM OPENJSON(@json) WITH (
                 base_sql = expr.sql()
                 self.assertEqual(base_sql, f"SCOPE_RESOLUTION({lhs + ', ' if lhs else ''}{rhs})")
                 self.assertEqual(parse_one(base_sql).sql("tsql"), f"{lhs}::{rhs}")
+
+    def test_count(self):
+        count = annotate_types(self.validate_identity("SELECT COUNT(1) FROM x"))
+        self.assertEqual(count.expressions[0].type.this, exp.DataType.Type.INT)
+
+        count_big = annotate_types(self.validate_identity("SELECT COUNT_BIG(1) FROM x"))
+        self.assertEqual(count_big.expressions[0].type.this, exp.DataType.Type.BIGINT)
+
+        self.validate_all(
+            "SELECT COUNT_BIG(1) FROM x",
+            read={
+                "duckdb": "SELECT COUNT(1) FROM x",
+                "spark": "SELECT COUNT(1) FROM x",
+            },
+            write={
+                "duckdb": "SELECT COUNT(1) FROM x",
+                "spark": "SELECT COUNT(1) FROM x",
+                "tsql": "SELECT COUNT_BIG(1) FROM x",
+            },
+        )
+        self.validate_all(
+            "SELECT COUNT(1) FROM x",
+            write={
+                "duckdb": "SELECT COUNT(1) FROM x",
+                "spark": "SELECT COUNT(1) FROM x",
+                "tsql": "SELECT COUNT(1) FROM x",
+            },
+        )
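
The new test_count pins down two behaviors: type annotation (COUNT is typed INT, COUNT_BIG is typed BIGINT) and cross-dialect handling (COUNT_BIG is preserved in T-SQL, while dialects without it fall back to plain COUNT). A small sketch using the same public API the test relies on, assuming a local sqlglot install:

import sqlglot
from sqlglot import exp
from sqlglot.optimizer.annotate_types import annotate_types

# COUNT_BIG annotates as BIGINT; plain COUNT annotates as INT
big = annotate_types(sqlglot.parse_one("SELECT COUNT_BIG(1) FROM x", read="tsql"))
assert big.expressions[0].type.this == exp.DataType.Type.BIGINT

# dialects without COUNT_BIG write it back as COUNT
print(sqlglot.transpile("SELECT COUNT_BIG(1) FROM x", read="tsql", write="duckdb")[0])
# SELECT COUNT(1) FROM x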