
Merging upstream version 26.0.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 21:58:54 +01:00
parent 27e05888f9
commit ae1eb339f9
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
57 changed files with 26226 additions and 25977 deletions

tests/dialects/test_duckdb.py

@@ -899,6 +899,16 @@ class TestDuckDB(Validator):
"NOT a ILIKE b",
)
self.validate_all(
"SELECT e'Hello\nworld'",
read={
"duckdb": "SELECT E'Hello\nworld'",
},
write={
"duckdb": "SELECT e'Hello\nworld'",
},
)
def test_array_index(self):
with self.assertLogs(helper_logger) as cm:
self.validate_all(

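A minimal sketch (not part of the diff) of the DuckDB escape-string normalization the new test above asserts; the expected output is the test's write value:

import sqlglot

# An uppercase E'...' escape-string prefix is written back with a lowercase e'...'.
print(sqlglot.transpile("SELECT E'Hello\nworld'", read="duckdb", write="duckdb")[0])
# expected, per the test: SELECT e'Hello\nworld'
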
tests/dialects/test_mysql.py

@@ -1,7 +1,7 @@
import unittest
import sys
from sqlglot import expressions as exp
from sqlglot import UnsupportedError, expressions as exp
from sqlglot.dialects.mysql import MySQL
from tests.dialects.test_dialect import Validator
@@ -1344,3 +1344,33 @@ COMMENT='客户账户表'"""
for format in ("JSON", "TRADITIONAL", "TREE"):
self.validate_identity(f"DESCRIBE FORMAT={format} UPDATE test SET test_col = 'abc'")
def test_number_format(self):
self.validate_all(
"SELECT FORMAT(12332.123456, 4)",
write={
"duckdb": "SELECT FORMAT('{:,.4f}', 12332.123456)",
"mysql": "SELECT FORMAT(12332.123456, 4)",
},
)
self.validate_all(
"SELECT FORMAT(12332.1, 4)",
write={
"duckdb": "SELECT FORMAT('{:,.4f}', 12332.1)",
"mysql": "SELECT FORMAT(12332.1, 4)",
},
)
self.validate_all(
"SELECT FORMAT(12332.2, 0)",
write={
"duckdb": "SELECT FORMAT('{:,.0f}', 12332.2)",
"mysql": "SELECT FORMAT(12332.2, 0)",
},
)
self.validate_all(
"SELECT FORMAT(12332.2, 2, 'de_DE')",
write={
"duckdb": UnsupportedError,
"mysql": "SELECT FORMAT(12332.2, 2, 'de_DE')",
},
)

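The new test_number_format cases above can be reproduced with sqlglot.transpile; a minimal sketch (not part of the diff), with expected outputs taken from the write values in the test:

import sqlglot
from sqlglot import ErrorLevel, UnsupportedError

# MySQL's two-argument FORMAT maps to a DuckDB format string.
print(sqlglot.transpile("SELECT FORMAT(12332.123456, 4)", read="mysql", write="duckdb")[0])
# expected: SELECT FORMAT('{:,.4f}', 12332.123456)

# The three-argument (locale) form has no DuckDB counterpart, so generation is
# expected to fail when unsupported constructs are escalated to errors.
try:
    sqlglot.transpile(
        "SELECT FORMAT(12332.2, 2, 'de_DE')",
        read="mysql",
        write="duckdb",
        unsupported_level=ErrorLevel.RAISE,
    )
except UnsupportedError as exc:
    print(f"unsupported as expected: {exc}")
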
tests/dialects/test_snowflake.py

@@ -976,6 +976,12 @@ WHERE
"snowflake": "EDITDISTANCE(col1, col2, 3)",
},
)
self.validate_identity("SELECT BITOR(a, b) FROM table")
self.validate_identity("SELECT BIT_OR(a, b) FROM table", "SELECT BITOR(a, b) FROM table")
# Test BITOR with three arguments, padding on the left
self.validate_identity("SELECT BITOR(a, b, 'LEFT') FROM table_name")
def test_null_treatment(self):
self.validate_all(
@@ -1450,6 +1456,9 @@ WHERE
self.validate_identity(
"""CREATE OR REPLACE FUNCTION ibis_udfs.public.object_values("obj" OBJECT) RETURNS ARRAY LANGUAGE JAVASCRIPT STRICT AS ' return Object.values(obj) '"""
)
self.validate_identity(
"CREATE OR REPLACE TABLE TEST (SOME_REF DECIMAL(38, 0) NOT NULL FOREIGN KEY REFERENCES SOME_OTHER_TABLE (ID))"
)
self.validate_identity(
"CREATE OR REPLACE FUNCTION my_udf(location OBJECT(city VARCHAR, zipcode DECIMAL(38, 0), val ARRAY(BOOLEAN))) RETURNS VARCHAR AS $$ SELECT 'foo' $$",
"CREATE OR REPLACE FUNCTION my_udf(location OBJECT(city VARCHAR, zipcode DECIMAL(38, 0), val ARRAY(BOOLEAN))) RETURNS VARCHAR AS ' SELECT \\'foo\\' '",

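A minimal sketch (not part of the diff) of the two Snowflake additions above: BIT_OR is normalized to BITOR, and the inline FOREIGN KEY REFERENCES column constraint round-trips:

import sqlglot

# BIT_OR is generated under its canonical Snowflake name BITOR.
print(sqlglot.transpile("SELECT BIT_OR(a, b) FROM table", read="snowflake", write="snowflake")[0])
# expected: SELECT BITOR(a, b) FROM table

# The inline FOREIGN KEY REFERENCES constraint parses and regenerates unchanged.
ast = sqlglot.parse_one(
    "CREATE OR REPLACE TABLE TEST (SOME_REF DECIMAL(38, 0) NOT NULL FOREIGN KEY REFERENCES SOME_OTHER_TABLE (ID))",
    read="snowflake",
)
print(ast.sql(dialect="snowflake"))
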
tests/dialects/test_tsql.py

@@ -515,16 +515,6 @@ class TestTSQL(Validator):
self.validate_identity("CAST(x AS SQL_VARIANT)")
self.validate_identity("CAST(x AS BIT)")
self.validate_all(
"CAST(x AS DATETIME2)",
read={
"": "CAST(x AS DATETIME)",
},
write={
"mysql": "CAST(x AS DATETIME)",
"tsql": "CAST(x AS DATETIME2)",
},
)
self.validate_all(
"CAST(x AS DATETIME2(6))",
write={
@@ -542,6 +532,19 @@ class TestTSQL(Validator):
},
)
for temporal_type in ("SMALLDATETIME", "DATETIME", "DATETIME2"):
self.validate_all(
f"CAST(x AS {temporal_type})",
read={
"": f"CAST(x AS {temporal_type})",
},
write={
"mysql": "CAST(x AS DATETIME)",
"duckdb": "CAST(x AS TIMESTAMP)",
"tsql": f"CAST(x AS {temporal_type})",
},
)
def test_types_ints(self):
self.validate_all(
"CAST(X AS INT)",
@@ -750,14 +753,6 @@ class TestTSQL(Validator):
},
)
self.validate_all(
"CAST(x as SMALLDATETIME)",
write={
"spark": "CAST(x AS TIMESTAMP)",
"tsql": "CAST(x AS DATETIME2)",
},
)
def test_types_bin(self):
self.validate_all(
"CAST(x as BIT)",
@@ -1094,7 +1089,7 @@ WHERE
expected_sqls = [
"CREATE PROCEDURE [TRANSF].[SP_Merge_Sales_Real] @Loadid INTEGER, @NumberOfRows INTEGER WITH EXECUTE AS OWNER, SCHEMABINDING, NATIVE_COMPILATION AS BEGIN SET XACT_ABORT ON",
"DECLARE @DWH_DateCreated AS DATETIME2 = CONVERT(DATETIME2, GETDATE(), 104)",
"DECLARE @DWH_DateCreated AS DATETIME = CONVERT(DATETIME, GETDATE(), 104)",
"DECLARE @DWH_DateModified AS DATETIME2 = CONVERT(DATETIME2, GETDATE(), 104)",
"DECLARE @DWH_IdUserCreated AS INTEGER = SUSER_ID(CURRENT_USER())",
"DECLARE @DWH_IdUserModified AS INTEGER = SUSER_ID(CURRENT_USER())",
@@ -1438,7 +1433,7 @@ WHERE
"CONVERT(DATETIME, x, 121)",
write={
"spark": "TO_TIMESTAMP(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS')",
"tsql": "CONVERT(DATETIME2, x, 121)",
"tsql": "CONVERT(DATETIME, x, 121)",
},
)
self.validate_all(
@@ -1899,7 +1894,7 @@ WHERE
*
FROM OPENJSON(@json) WITH (
Number VARCHAR(200) '$.Order.Number',
Date DATETIME2 '$.Order.Date',
Date DATETIME '$.Order.Date',
Customer VARCHAR(200) '$.AccountNumber',
Quantity INTEGER '$.Item.Quantity',
[Order] NVARCHAR(MAX) AS JSON

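The T-SQL hunks above stop coercing temporal types to DATETIME2; a minimal sketch (not part of the diff) of the mapping the new loop asserts:

import sqlglot

# Each T-SQL temporal type now round-trips unchanged and maps to
# DATETIME in MySQL and TIMESTAMP in DuckDB.
for temporal_type in ("SMALLDATETIME", "DATETIME", "DATETIME2"):
    sql = f"CAST(x AS {temporal_type})"
    print(sqlglot.transpile(sql, read="tsql", write="tsql")[0])    # unchanged
    print(sqlglot.transpile(sql, read="tsql", write="mysql")[0])   # CAST(x AS DATETIME)
    print(sqlglot.transpile(sql, read="tsql", write="duckdb")[0])  # CAST(x AS TIMESTAMP)
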
tests/test_parser.py

@@ -14,6 +14,10 @@ class TestParser(unittest.TestCase):
parse_one("")
def test_parse_into(self):
self.assertIsInstance(parse_one("(1)", into=exp.Tuple), exp.Tuple)
self.assertIsInstance(parse_one("(1,)", into=exp.Tuple), exp.Tuple)
self.assertIsInstance(parse_one("(x=1)", into=exp.Tuple), exp.Tuple)
self.assertIsInstance(parse_one("select * from t", into=exp.Select), exp.Select)
self.assertIsInstance(parse_one("select * from t limit 5", into=exp.Select), exp.Select)
self.assertIsInstance(parse_one("left join foo", into=exp.Join), exp.Join)
@@ -23,9 +27,9 @@ class TestParser(unittest.TestCase):
self.assertIsInstance(
parse_one(
"WHEN MATCHED THEN UPDATE SET target.salary = COALESCE(source.salary, target.salary)",
into=exp.When,
into=exp.Whens,
),
exp.When,
exp.Whens,
)
with self.assertRaises(ParseError) as ctx:
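
A minimal sketch (not part of the diff) of the parse_one(..., into=...) behavior the new and updated assertions above cover:

from sqlglot import exp, parse_one

# Trailing commas and named expressions still parse into a Tuple when requested.
print(isinstance(parse_one("(1,)", into=exp.Tuple), exp.Tuple))   # True
print(isinstance(parse_one("(x=1)", into=exp.Tuple), exp.Tuple))  # True

# The MERGE "WHEN MATCHED" clause now parses into exp.Whens rather than exp.When.
whens = parse_one(
    "WHEN MATCHED THEN UPDATE SET target.salary = COALESCE(source.salary, target.salary)",
    into=exp.Whens,
)
print(isinstance(whens, exp.Whens))  # True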