
Merging upstream version 25.26.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 21:56:02 +01:00
parent 9138e4b92a
commit 829a709061
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
117 changed files with 49296 additions and 47316 deletions

tests/dialects/test_bigquery.py

@@ -107,6 +107,7 @@ LANGUAGE js AS
select_with_quoted_udf = self.validate_identity("SELECT `p.d.UdF`(data) FROM `p.d.t`")
self.assertEqual(select_with_quoted_udf.selects[0].name, "p.d.UdF")
self.validate_identity("SELECT ARRAY_CONCAT([1])")
self.validate_identity("SELECT * FROM READ_CSV('bla.csv')")
self.validate_identity("CAST(x AS STRUCT<list ARRAY<INT64>>)")
self.validate_identity("assert.true(1 = 1)")

tests/dialects/test_clickhouse.py

@@ -2,6 +2,7 @@ from datetime import date
from sqlglot import exp, parse_one
from sqlglot.dialects import ClickHouse
from sqlglot.expressions import convert
from sqlglot.optimizer import traverse_scope
from tests.dialects.test_dialect import Validator
from sqlglot.errors import ErrorLevel
@@ -28,6 +29,7 @@ class TestClickhouse(Validator):
self.assertEqual(expr.sql(dialect="clickhouse"), "COUNT(x)")
self.assertIsNone(expr._meta)
self.validate_identity("CAST(1 AS Bool)")
self.validate_identity("SELECT toString(CHAR(104.1, 101, 108.9, 108.9, 111, 32))")
self.validate_identity("@macro").assert_is(exp.Parameter).this.assert_is(exp.Var)
self.validate_identity("SELECT toFloat(like)")
@@ -420,11 +422,6 @@ class TestClickhouse(Validator):
" GROUP BY loyalty ORDER BY loyalty ASC"
},
)
self.validate_identity("SELECT s, arr FROM arrays_test ARRAY JOIN arr")
self.validate_identity("SELECT s, arr, a FROM arrays_test LEFT ARRAY JOIN arr AS a")
self.validate_identity(
"SELECT s, arr_external FROM arrays_test ARRAY JOIN [1, 2, 3] AS arr_external"
)
self.validate_all(
"SELECT quantile(0.5)(a)",
read={"duckdb": "SELECT quantile(a, 0.5)"},
@@ -1100,3 +1097,36 @@ LIFETIME(MIN 0 MAX 0)""",
def test_grant(self):
self.validate_identity("GRANT SELECT(x, y) ON db.table TO john WITH GRANT OPTION")
self.validate_identity("GRANT INSERT(x, y) ON db.table TO john")
def test_array_join(self):
expr = self.validate_identity(
"SELECT * FROM arrays_test ARRAY JOIN arr1, arrays_test.arr2 AS foo, ['a', 'b', 'c'] AS elem"
)
joins = expr.args["joins"]
self.assertEqual(len(joins), 1)
join = joins[0]
self.assertEqual(join.kind, "ARRAY")
self.assertIsInstance(join.this, exp.Column)
self.assertEqual(len(join.expressions), 2)
self.assertIsInstance(join.expressions[0], exp.Alias)
self.assertIsInstance(join.expressions[0].this, exp.Column)
self.assertIsInstance(join.expressions[1], exp.Alias)
self.assertIsInstance(join.expressions[1].this, exp.Array)
self.validate_identity("SELECT s, arr FROM arrays_test ARRAY JOIN arr")
self.validate_identity("SELECT s, arr, a FROM arrays_test LEFT ARRAY JOIN arr AS a")
self.validate_identity(
"SELECT s, arr_external FROM arrays_test ARRAY JOIN [1, 2, 3] AS arr_external"
)
self.validate_identity(
"SELECT * FROM arrays_test ARRAY JOIN [1, 2, 3] AS arr_external1, ['a', 'b', 'c'] AS arr_external2, splitByString(',', 'asd,qwerty,zxc') AS arr_external3"
)
def test_traverse_scope(self):
sql = "SELECT * FROM t FINAL"
scopes = traverse_scope(parse_one(sql, dialect=self.dialect))
self.assertEqual(len(scopes), 1)
self.assertEqual(set(scopes[0].sources), {"t"})
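Note: the new test_array_join checks the parse tree as well as the identities: a ClickHouse ARRAY JOIN is represented as a single exp.Join with kind "ARRAY", whose first array lands in join.this and whose remaining (aliased) arrays land in join.expressions. A minimal sketch of the same inspection, reusing the test's SQL:

from sqlglot import exp, parse_one

expr = parse_one(
    "SELECT * FROM arrays_test ARRAY JOIN arr1, arrays_test.arr2 AS foo, ['a', 'b', 'c'] AS elem",
    read="clickhouse",
)
join = expr.args["joins"][0]
print(join.kind)              # ARRAY
print(type(join.this))        # exp.Column -- the first joined array
print(len(join.expressions))  # 2 -- the aliased column and the aliased array literal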

tests/dialects/test_databricks.py

@@ -7,6 +7,7 @@ class TestDatabricks(Validator):
dialect = "databricks"
def test_databricks(self):
self.validate_identity("SELECT t.current_time FROM t")
self.validate_identity("ALTER TABLE labels ADD COLUMN label_score FLOAT")
self.validate_identity("DESCRIBE HISTORY a.b")
self.validate_identity("DESCRIBE history.tbl")

tests/dialects/test_dialect.py

@@ -1762,6 +1762,7 @@ class TestDialect(Validator):
self.validate_all(
"LEVENSHTEIN(col1, col2)",
write={
"bigquery": "EDIT_DISTANCE(col1, col2)",
"duckdb": "LEVENSHTEIN(col1, col2)",
"drill": "LEVENSHTEIN_DISTANCE(col1, col2)",
"presto": "LEVENSHTEIN_DISTANCE(col1, col2)",
@@ -1772,6 +1773,7 @@ class TestDialect(Validator):
self.validate_all(
"LEVENSHTEIN(coalesce(col1, col2), coalesce(col2, col1))",
write={
"bigquery": "EDIT_DISTANCE(COALESCE(col1, col2), COALESCE(col2, col1))",
"duckdb": "LEVENSHTEIN(COALESCE(col1, col2), COALESCE(col2, col1))",
"drill": "LEVENSHTEIN_DISTANCE(COALESCE(col1, col2), COALESCE(col2, col1))",
"presto": "LEVENSHTEIN_DISTANCE(COALESCE(col1, col2), COALESCE(col2, col1))",

tests/dialects/test_duckdb.py

@@ -256,6 +256,9 @@ class TestDuckDB(Validator):
parse_one("a // b", read="duckdb").assert_is(exp.IntDiv).sql(dialect="duckdb"), "a // b"
)
self.validate_identity("SELECT UNNEST([1, 2])").selects[0].assert_is(exp.UDTF)
self.validate_identity("'red' IN flags").args["field"].assert_is(exp.Column)
self.validate_identity("'red' IN tbl.flags")
self.validate_identity("CREATE TABLE tbl1 (u UNION(num INT, str TEXT))")
self.validate_identity("INSERT INTO x BY NAME SELECT 1 AS y")
self.validate_identity("SELECT 1 AS x UNION ALL BY NAME SELECT 2 AS x")

tests/dialects/test_hive.py

@@ -187,6 +187,7 @@ class TestHive(Validator):
"SELECT a, b FROM x LATERAL VIEW EXPLODE(y) t AS a LATERAL VIEW EXPLODE(z) u AS b",
write={
"presto": "SELECT a, b FROM x CROSS JOIN UNNEST(y) AS t(a) CROSS JOIN UNNEST(z) AS u(b)",
"duckdb": "SELECT a, b FROM x CROSS JOIN UNNEST(y) AS t(a) CROSS JOIN UNNEST(z) AS u(b)",
"hive": "SELECT a, b FROM x LATERAL VIEW EXPLODE(y) t AS a LATERAL VIEW EXPLODE(z) u AS b",
"spark": "SELECT a, b FROM x LATERAL VIEW EXPLODE(y) t AS a LATERAL VIEW EXPLODE(z) u AS b",
},
@@ -195,6 +196,7 @@ class TestHive(Validator):
"SELECT a FROM x LATERAL VIEW EXPLODE(y) t AS a",
write={
"presto": "SELECT a FROM x CROSS JOIN UNNEST(y) AS t(a)",
"duckdb": "SELECT a FROM x CROSS JOIN UNNEST(y) AS t(a)",
"hive": "SELECT a FROM x LATERAL VIEW EXPLODE(y) t AS a",
"spark": "SELECT a FROM x LATERAL VIEW EXPLODE(y) t AS a",
},
@@ -211,6 +213,7 @@ class TestHive(Validator):
"SELECT a FROM x LATERAL VIEW EXPLODE(ARRAY(y)) t AS a",
write={
"presto": "SELECT a FROM x CROSS JOIN UNNEST(ARRAY[y]) AS t(a)",
"duckdb": "SELECT a FROM x CROSS JOIN UNNEST([y]) AS t(a)",
"hive": "SELECT a FROM x LATERAL VIEW EXPLODE(ARRAY(y)) t AS a",
"spark": "SELECT a FROM x LATERAL VIEW EXPLODE(ARRAY(y)) t AS a",
},
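Note: the new duckdb targets mirror the presto ones: Hive's LATERAL VIEW EXPLODE is rewritten to CROSS JOIN UNNEST, with the view alias folded into a table alias that names the produced column. For example (a sketch reusing the test's SQL):

import sqlglot

print(sqlglot.transpile(
    "SELECT a FROM x LATERAL VIEW EXPLODE(y) t AS a",
    read="hive",
    write="duckdb",
))
# ['SELECT a FROM x CROSS JOIN UNNEST(y) AS t(a)']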

tests/dialects/test_mysql.py

@@ -139,6 +139,7 @@ class TestMySQL(Validator):
)
def test_identity(self):
self.validate_identity("SELECT HIGH_PRIORITY STRAIGHT_JOIN SQL_CALC_FOUND_ROWS * FROM t")
self.validate_identity("SELECT CAST(COALESCE(`id`, 'NULL') AS CHAR CHARACTER SET binary)")
self.validate_identity("SELECT e.* FROM e STRAIGHT_JOIN p ON e.x = p.y")
self.validate_identity("ALTER TABLE test_table ALTER COLUMN test_column SET DEFAULT 1")
@@ -1305,3 +1306,12 @@ COMMENT='客户账户表'"""
for sql in grant_cmds:
with self.subTest(f"Testing MySQL's GRANT command statement: {sql}"):
self.validate_identity(sql, check_command_warning=True)
def test_explain(self):
self.validate_identity(
"EXPLAIN ANALYZE SELECT * FROM t", "DESCRIBE ANALYZE SELECT * FROM t"
)
expression = self.parse_one("EXPLAIN ANALYZE SELECT * FROM t")
self.assertIsInstance(expression, exp.Describe)
self.assertEqual(expression.text("style"), "ANALYZE")
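Note: test_explain pins down that MySQL's EXPLAIN is parsed as a synonym of DESCRIBE: the statement round-trips as DESCRIBE ANALYZE and produces an exp.Describe node whose "style" arg holds ANALYZE. The same check as a standalone sketch:

from sqlglot import exp, parse_one

node = parse_one("EXPLAIN ANALYZE SELECT * FROM t", read="mysql")
assert isinstance(node, exp.Describe)
print(node.text("style"))         # ANALYZE
print(node.sql(dialect="mysql"))  # DESCRIBE ANALYZE SELECT * FROM t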

tests/dialects/test_oracle.py

@@ -329,6 +329,57 @@ class TestOracle(Validator):
)
self.validate_identity("INSERT /*+ APPEND */ INTO IAP_TBL (id, col1) VALUES (2, 'test2')")
self.validate_identity("INSERT /*+ APPEND_VALUES */ INTO dest_table VALUES (i, 'Value')")
self.validate_identity(
"SELECT /*+ LEADING(departments employees) USE_NL(employees) */ * FROM employees JOIN departments ON employees.department_id = departments.department_id",
"""SELECT /*+ LEADING(departments employees)
USE_NL(employees) */
*
FROM employees
JOIN departments
ON employees.department_id = departments.department_id""",
pretty=True,
)
self.validate_identity(
"SELECT /*+ USE_NL(bbbbbbbbbbbbbbbbbbbbbbbb) LEADING(aaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccc dddddddddddddddddddddddd) INDEX(cccccccccccccccccccccccc) */ * FROM aaaaaaaaaaaaaaaaaaaaaaaa JOIN bbbbbbbbbbbbbbbbbbbbbbbb ON aaaaaaaaaaaaaaaaaaaaaaaa.id = bbbbbbbbbbbbbbbbbbbbbbbb.a_id JOIN cccccccccccccccccccccccc ON bbbbbbbbbbbbbbbbbbbbbbbb.id = cccccccccccccccccccccccc.b_id JOIN dddddddddddddddddddddddd ON cccccccccccccccccccccccc.id = dddddddddddddddddddddddd.c_id",
)
self.validate_identity(
"SELECT /*+ USE_NL(bbbbbbbbbbbbbbbbbbbbbbbb) LEADING(aaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccc dddddddddddddddddddddddd) INDEX(cccccccccccccccccccccccc) */ * FROM aaaaaaaaaaaaaaaaaaaaaaaa JOIN bbbbbbbbbbbbbbbbbbbbbbbb ON aaaaaaaaaaaaaaaaaaaaaaaa.id = bbbbbbbbbbbbbbbbbbbbbbbb.a_id JOIN cccccccccccccccccccccccc ON bbbbbbbbbbbbbbbbbbbbbbbb.id = cccccccccccccccccccccccc.b_id JOIN dddddddddddddddddddddddd ON cccccccccccccccccccccccc.id = dddddddddddddddddddddddd.c_id",
"""SELECT /*+ USE_NL(bbbbbbbbbbbbbbbbbbbbbbbb)
LEADING(
aaaaaaaaaaaaaaaaaaaaaaaa
bbbbbbbbbbbbbbbbbbbbbbbb
cccccccccccccccccccccccc
dddddddddddddddddddddddd
)
INDEX(cccccccccccccccccccccccc) */
*
FROM aaaaaaaaaaaaaaaaaaaaaaaa
JOIN bbbbbbbbbbbbbbbbbbbbbbbb
ON aaaaaaaaaaaaaaaaaaaaaaaa.id = bbbbbbbbbbbbbbbbbbbbbbbb.a_id
JOIN cccccccccccccccccccccccc
ON bbbbbbbbbbbbbbbbbbbbbbbb.id = cccccccccccccccccccccccc.b_id
JOIN dddddddddddddddddddddddd
ON cccccccccccccccccccccccc.id = dddddddddddddddddddddddd.c_id""",
pretty=True,
)
# Test that parsing error with keywords like select where etc falls back
self.validate_identity(
"SELECT /*+ LEADING(departments employees) USE_NL(employees) select where group by is order by */ * FROM employees JOIN departments ON employees.department_id = departments.department_id",
"""SELECT /*+ LEADING(departments employees) USE_NL(employees) select where group by is order by */
*
FROM employees
JOIN departments
ON employees.department_id = departments.department_id""",
pretty=True,
)
# Test that parsing error with , inside hint function falls back
self.validate_identity(
"SELECT /*+ LEADING(departments, employees) */ * FROM employees JOIN departments ON employees.department_id = departments.department_id"
)
# Test that parsing error with keyword inside hint function falls back
self.validate_identity(
"SELECT /*+ LEADING(departments select) */ * FROM employees JOIN departments ON employees.department_id = departments.department_id"
)
def test_xml_table(self):
self.validate_identity("XMLTABLE('x')")

tests/dialects/test_presto.py

@@ -13,6 +13,16 @@ class TestPresto(Validator):
self.validate_identity("CAST(TDIGEST_AGG(1) AS TDIGEST)")
self.validate_identity("CAST(x AS HYPERLOGLOG)")
self.validate_all(
"CAST(x AS BOOLEAN)",
read={
"tsql": "CAST(x AS BIT)",
},
write={
"presto": "CAST(x AS BOOLEAN)",
"tsql": "CAST(x AS BIT)",
},
)
self.validate_all(
"SELECT FROM_ISO8601_TIMESTAMP('2020-05-11T11:15:05')",
write={
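Note: the new BOOLEAN case above documents the T-SQL bridge: BIT reads into the generic boolean type and boolean writes back to BIT. A sketch of both directions:

import sqlglot

print(sqlglot.transpile("CAST(x AS BIT)", read="tsql", write="presto"))
# ['CAST(x AS BOOLEAN)']
print(sqlglot.transpile("CAST(x AS BOOLEAN)", read="presto", write="tsql"))
# ['CAST(x AS BIT)']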

tests/dialects/test_snowflake.py

@@ -927,6 +927,29 @@ WHERE
"bigquery": "GENERATE_UUID()",
},
)
self.validate_identity("TRY_TO_TIMESTAMP(foo)").assert_is(exp.Anonymous)
self.validate_identity("TRY_TO_TIMESTAMP('12345')").assert_is(exp.Anonymous)
self.validate_all(
"SELECT TRY_TO_TIMESTAMP('2024-01-15 12:30:00.000')",
write={
"snowflake": "SELECT TRY_CAST('2024-01-15 12:30:00.000' AS TIMESTAMP)",
"duckdb": "SELECT TRY_CAST('2024-01-15 12:30:00.000' AS TIMESTAMP)",
},
)
self.validate_all(
"SELECT TRY_TO_TIMESTAMP('invalid')",
write={
"snowflake": "SELECT TRY_CAST('invalid' AS TIMESTAMP)",
"duckdb": "SELECT TRY_CAST('invalid' AS TIMESTAMP)",
},
)
self.validate_all(
"SELECT TRY_TO_TIMESTAMP('04/05/2013 01:02:03', 'mm/DD/yyyy hh24:mi:ss')",
write={
"snowflake": "SELECT TRY_TO_TIMESTAMP('04/05/2013 01:02:03', 'mm/DD/yyyy hh24:mi:ss')",
"duckdb": "SELECT CAST(TRY_STRPTIME('04/05/2013 01:02:03', '%m/%d/%Y %H:%M:%S') AS TIMESTAMP)",
},
)
def test_null_treatment(self):
self.validate_all(
@@ -1085,6 +1108,20 @@ WHERE
"spark": "SELECT * FROM (SELECT * FROM t1 JOIN t2 ON t1.a = t2.c) TABLESAMPLE (1 PERCENT)",
},
)
self.validate_all(
"TO_DOUBLE(expr)",
write={
"snowflake": "TO_DOUBLE(expr)",
"duckdb": "CAST(expr AS DOUBLE)",
},
)
self.validate_all(
"TO_DOUBLE(expr, fmt)",
write={
"snowflake": "TO_DOUBLE(expr, fmt)",
"duckdb": UnsupportedError,
},
)
def test_timestamps(self):
self.validate_identity("SELECT CAST('12:00:00' AS TIME)")

tests/dialects/test_starrocks.py

@@ -5,6 +5,9 @@ from tests.dialects.test_dialect import Validator
class TestStarrocks(Validator):
dialect = "starrocks"
def test_starrocks(self):
self.validate_identity("ALTER TABLE a SWAP WITH b")
def test_ddl(self):
ddl_sqls = [
"DISTRIBUTED BY HASH (col1) BUCKETS 1",

tests/dialects/test_trino.py

@@ -9,6 +9,9 @@ class TestTrino(Validator):
self.validate_identity("JSON_QUERY(content, 'lax $.HY.*')")
self.validate_identity("JSON_QUERY(content, 'strict $.HY.*' WITH UNCONDITIONAL WRAPPER)")
self.validate_identity("JSON_QUERY(content, 'strict $.HY.*' WITHOUT CONDITIONAL WRAPPER)")
self.validate_identity(
"SELECT LISTAGG(DISTINCT col, ',') WITHIN GROUP (ORDER BY col ASC) FROM tbl"
)
def test_trim(self):
self.validate_identity("SELECT TRIM('!' FROM '!foo!')")

tests/dialects/test_tsql.py

@@ -1,6 +1,6 @@
from sqlglot import exp, parse, parse_one
from tests.dialects.test_dialect import Validator
from sqlglot.errors import ParseError
from sqlglot.errors import ParseError, UnsupportedError
from sqlglot.optimizer.annotate_types import annotate_types
@@ -1001,6 +1001,17 @@ class TestTSQL(Validator):
)
self.validate_identity("CREATE PROC foo AS SELECT BAR() AS baz")
self.validate_identity("CREATE PROCEDURE foo AS SELECT BAR() AS baz")
self.validate_identity("CREATE PROCEDURE foo WITH ENCRYPTION AS SELECT 1")
self.validate_identity("CREATE PROCEDURE foo WITH RECOMPILE AS SELECT 1")
self.validate_identity("CREATE PROCEDURE foo WITH SCHEMABINDING AS SELECT 1")
self.validate_identity("CREATE PROCEDURE foo WITH NATIVE_COMPILATION AS SELECT 1")
self.validate_identity("CREATE PROCEDURE foo WITH EXECUTE AS OWNER AS SELECT 1")
self.validate_identity("CREATE PROCEDURE foo WITH EXECUTE AS 'username' AS SELECT 1")
self.validate_identity(
"CREATE PROCEDURE foo WITH EXECUTE AS OWNER, SCHEMABINDING, NATIVE_COMPILATION AS SELECT 1"
)
self.validate_identity("CREATE FUNCTION foo(@bar INTEGER) RETURNS TABLE AS RETURN SELECT 1")
self.validate_identity("CREATE FUNCTION dbo.ISOweek(@DATE DATETIME2) RETURNS INTEGER")
@@ -1059,6 +1070,7 @@ WHERE
CREATE procedure [TRANSF].[SP_Merge_Sales_Real]
@Loadid INTEGER
,@NumberOfRows INTEGER
WITH EXECUTE AS OWNER, SCHEMABINDING, NATIVE_COMPILATION
AS
BEGIN
SET XACT_ABORT ON;
@@ -1074,7 +1086,7 @@ WHERE
"""
expected_sqls = [
"CREATE PROCEDURE [TRANSF].[SP_Merge_Sales_Real] @Loadid INTEGER, @NumberOfRows INTEGER AS BEGIN SET XACT_ABORT ON",
"CREATE PROCEDURE [TRANSF].[SP_Merge_Sales_Real] @Loadid INTEGER, @NumberOfRows INTEGER WITH EXECUTE AS OWNER, SCHEMABINDING, NATIVE_COMPILATION AS BEGIN SET XACT_ABORT ON",
"DECLARE @DWH_DateCreated AS DATETIME2 = CONVERT(DATETIME2, GETDATE(), 104)",
"DECLARE @DWH_DateModified AS DATETIME2 = CONVERT(DATETIME2, GETDATE(), 104)",
"DECLARE @DWH_IdUserCreated AS INTEGER = SUSER_ID(CURRENT_USER())",
@@ -2014,3 +2026,39 @@ FROM OPENJSON(@json) WITH (
self.validate_identity(
"GRANT EXECUTE ON TestProc TO User2 AS TesterRole", check_command_warning=True
)
def test_parsename(self):
for i in range(4):
with self.subTest("Testing PARSENAME <-> SPLIT_PART"):
self.validate_all(
f"SELECT PARSENAME('1.2.3', {i})",
read={
"spark": f"SELECT SPLIT_PART('1.2.3', '.', {4 - i})",
"databricks": f"SELECT SPLIT_PART('1.2.3', '.', {4 - i})",
},
write={
"spark": f"SELECT SPLIT_PART('1.2.3', '.', {4 - i})",
"databricks": f"SELECT SPLIT_PART('1.2.3', '.', {4 - i})",
"tsql": f"SELECT PARSENAME('1.2.3', {i})",
},
)
# Test non-dot delimiter
self.validate_all(
"SELECT SPLIT_PART('1,2,3', ',', 1)",
write={
"spark": "SELECT SPLIT_PART('1,2,3', ',', 1)",
"databricks": "SELECT SPLIT_PART('1,2,3', ',', 1)",
"tsql": UnsupportedError,
},
)
# Test column-type parameters
self.validate_all(
"WITH t AS (SELECT 'a.b.c' AS value, 1 AS idx) SELECT SPLIT_PART(value, '.', idx) FROM t",
write={
"spark": "WITH t AS (SELECT 'a.b.c' AS value, 1 AS idx) SELECT SPLIT_PART(value, '.', idx) FROM t",
"databricks": "WITH t AS (SELECT 'a.b.c' AS value, 1 AS idx) SELECT SPLIT_PART(value, '.', idx) FROM t",
"tsql": UnsupportedError,
},
)
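Note: the PARSENAME ↔ SPLIT_PART mapping hinges on indexing direction: PARSENAME numbers the parts of a dotted object name from the right (at most four parts), while SPLIT_PART counts from the left, hence the 4 - i flip for the three-part literal in the test. When the flip cannot be computed (a non-dot delimiter, or column-valued arguments), the T-SQL writer raises UnsupportedError instead of guessing. A sketch of the literal case:

import sqlglot

print(sqlglot.transpile("SELECT PARSENAME('1.2.3', 1)", read="tsql", write="spark"))
# ["SELECT SPLIT_PART('1.2.3', '.', 3)"]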