
Adding upstream version 10.5.10.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 15:06:33 +01:00
parent 556602e7d9
commit 26f20be569
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
43 changed files with 1384 additions and 356 deletions


@@ -17,7 +17,8 @@ class TestClickhouse(Validator):
self.validate_identity("SELECT quantile(0.5)(a)")
self.validate_identity("SELECT quantiles(0.5)(a) AS x FROM t")
self.validate_identity("SELECT * FROM foo WHERE x GLOBAL IN (SELECT * FROM bar)")
self.validate_identity("position(a, b)")
self.validate_identity("position(haystack, needle)")
self.validate_identity("position(haystack, needle, position)")
self.validate_all(
"SELECT fname, lname, age FROM person ORDER BY age DESC NULLS FIRST, fname ASC NULLS LAST, lname",
@@ -48,6 +49,10 @@ class TestClickhouse(Validator):
"clickhouse": "SELECT quantileIf(0.5)(a, TRUE)",
},
)
self.validate_all(
"SELECT position(needle IN haystack)",
write={"clickhouse": "SELECT position(haystack, needle)"},
)
def test_cte(self):
self.validate_identity("WITH 'x' AS foo SELECT foo")
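A minimal sketch of the behaviour the new position tests above assert, assuming sqlglot 10.5.10 is installed:

import sqlglot

# The generic position(needle IN haystack) form is rewritten to ClickHouse's
# two-argument call; position(haystack, needle[, position]) round-trips as-is.
print(sqlglot.transpile("SELECT position(needle IN haystack)", read="clickhouse", write="clickhouse")[0])
# -> SELECT position(haystack, needle)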


@@ -950,40 +950,40 @@ class TestDialect(Validator):
},
)
self.validate_all(
"POSITION(' ' in x)",
"POSITION(needle in haystack)",
write={
"drill": "STRPOS(x, ' ')",
"duckdb": "STRPOS(x, ' ')",
"postgres": "STRPOS(x, ' ')",
"presto": "STRPOS(x, ' ')",
"spark": "LOCATE(' ', x)",
"clickhouse": "position(x, ' ')",
"snowflake": "POSITION(' ', x)",
"mysql": "LOCATE(' ', x)",
"drill": "STRPOS(haystack, needle)",
"duckdb": "STRPOS(haystack, needle)",
"postgres": "STRPOS(haystack, needle)",
"presto": "STRPOS(haystack, needle)",
"spark": "LOCATE(needle, haystack)",
"clickhouse": "position(haystack, needle)",
"snowflake": "POSITION(needle, haystack)",
"mysql": "LOCATE(needle, haystack)",
},
)
self.validate_all(
"STR_POSITION(x, 'a')",
"STR_POSITION(haystack, needle)",
write={
"drill": "STRPOS(x, 'a')",
"duckdb": "STRPOS(x, 'a')",
"postgres": "STRPOS(x, 'a')",
"presto": "STRPOS(x, 'a')",
"spark": "LOCATE('a', x)",
"clickhouse": "position(x, 'a')",
"snowflake": "POSITION('a', x)",
"mysql": "LOCATE('a', x)",
"drill": "STRPOS(haystack, needle)",
"duckdb": "STRPOS(haystack, needle)",
"postgres": "STRPOS(haystack, needle)",
"presto": "STRPOS(haystack, needle)",
"spark": "LOCATE(needle, haystack)",
"clickhouse": "position(haystack, needle)",
"snowflake": "POSITION(needle, haystack)",
"mysql": "LOCATE(needle, haystack)",
},
)
self.validate_all(
"POSITION('a', x, 3)",
"POSITION(needle, haystack, pos)",
write={
"drill": "STRPOS(SUBSTR(x, 3), 'a') + 3 - 1",
"presto": "STRPOS(x, 'a', 3)",
"spark": "LOCATE('a', x, 3)",
"clickhouse": "position(x, 'a', 3)",
"snowflake": "POSITION('a', x, 3)",
"mysql": "LOCATE('a', x, 3)",
"drill": "STRPOS(SUBSTR(haystack, pos), needle) + pos - 1",
"presto": "STRPOS(haystack, needle, pos)",
"spark": "LOCATE(needle, haystack, pos)",
"clickhouse": "position(haystack, needle, pos)",
"snowflake": "POSITION(needle, haystack, pos)",
"mysql": "LOCATE(needle, haystack, pos)",
},
)
self.validate_all(
@@ -1365,3 +1365,19 @@ SELECT
"spark": "MERGE INTO target USING source ON target.id = source.id WHEN MATCHED THEN UPDATE * WHEN NOT MATCHED THEN INSERT *",
},
)
self.validate_all(
"""
MERGE a b USING c d ON b.id = d.id
WHEN MATCHED AND EXISTS (
SELECT b.name
EXCEPT
SELECT d.name
)
THEN UPDATE SET b.name = d.name
""",
write={
"bigquery": "MERGE INTO a AS b USING c AS d ON b.id = d.id WHEN MATCHED AND EXISTS(SELECT b.name EXCEPT DISTINCT SELECT d.name) THEN UPDATE SET b.name = d.name",
"snowflake": "MERGE INTO a AS b USING c AS d ON b.id = d.id WHEN MATCHED AND EXISTS(SELECT b.name EXCEPT SELECT d.name) THEN UPDATE SET b.name = d.name",
"spark": "MERGE INTO a AS b USING c AS d ON b.id = d.id WHEN MATCHED AND EXISTS(SELECT b.name EXCEPT SELECT d.name) THEN UPDATE SET b.name = d.name",
},
)
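A minimal sketch of the cross-dialect POSITION mapping exercised above, assuming sqlglot 10.5.10:

import sqlglot

# The generic POSITION(needle IN haystack) is rendered with each dialect's native function.
print(sqlglot.transpile("POSITION(needle in haystack)", write="postgres")[0])  # -> STRPOS(haystack, needle)
print(sqlglot.transpile("POSITION(needle in haystack)", write="spark")[0])     # -> LOCATE(needle, haystack)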


@@ -75,6 +75,15 @@ class TestMySQL(Validator):
"spark": "CAST(x AS TEXT) + CAST(y AS TEXT)",
},
)
self.validate_all(
"CAST(x AS MEDIUMBLOB) + CAST(y AS LONGBLOB)",
read={
"mysql": "CAST(x AS MEDIUMBLOB) + CAST(y AS LONGBLOB)",
},
write={
"spark": "CAST(x AS BLOB) + CAST(y AS BLOB)",
},
)
def test_canonical_functions(self):
self.validate_identity("SELECT LEFT('str', 2)", "SELECT SUBSTRING('str', 1, 2)")


@@ -12,6 +12,24 @@ class TestSnowflake(Validator):
"snowflake": "SELECT * FROM xxx WHERE col ILIKE '%Don\\'t%'",
},
)
self.validate_all(
"SELECT * EXCLUDE a, b FROM xxx",
write={
"snowflake": "SELECT * EXCLUDE (a, b) FROM xxx",
},
)
self.validate_all(
"SELECT * RENAME a AS b, c AS d FROM xxx",
write={
"snowflake": "SELECT * RENAME (a AS b, c AS d) FROM xxx",
},
)
self.validate_all(
"SELECT * EXCLUDE a, b RENAME (c AS d, E as F) FROM xxx",
write={
"snowflake": "SELECT * EXCLUDE (a, b) RENAME (c AS d, E AS F) FROM xxx",
},
)
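A minimal sketch of the new Snowflake EXCLUDE / RENAME handling asserted above, assuming sqlglot 10.5.10:

import sqlglot

# EXCLUDE and RENAME modifiers on SELECT * are parsed and regenerated with parentheses.
print(sqlglot.transpile("SELECT * EXCLUDE a, b FROM xxx", read="snowflake", write="snowflake")[0])
# -> SELECT * EXCLUDE (a, b) FROM xxx
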
self.validate_all(
'x:a:"b c"',
write={


@@ -1,3 +1,4 @@
from sqlglot import exp, parse, parse_one
from tests.dialects.test_dialect import Validator
@@ -5,6 +6,10 @@ class TestTSQL(Validator):
dialect = "tsql"
def test_tsql(self):
self.validate_identity("SELECT CASE WHEN a > 1 THEN b END")
self.validate_identity("END")
self.validate_identity("@x")
self.validate_identity("#x")
self.validate_identity("DECLARE @TestVariable AS VARCHAR(100)='Save Our Planet'")
self.validate_identity("PRINT @TestVariable")
self.validate_identity("SELECT Employee_ID, Department_ID FROM @MyTableVar")
@@ -87,6 +92,95 @@ class TestTSQL(Validator):
},
)
def test_udf(self):
self.validate_identity(
"CREATE PROCEDURE foo @a INTEGER, @b INTEGER AS SELECT @a = SUM(bla) FROM baz AS bar"
)
self.validate_identity(
"CREATE PROC foo @ID INTEGER, @AGE INTEGER AS SELECT DB_NAME(@ID) AS ThatDB"
)
self.validate_identity("CREATE PROC foo AS SELECT BAR() AS baz")
self.validate_identity("CREATE PROCEDURE foo AS SELECT BAR() AS baz")
self.validate_identity("CREATE FUNCTION foo(@bar INTEGER) RETURNS TABLE AS RETURN SELECT 1")
self.validate_identity("CREATE FUNCTION dbo.ISOweek(@DATE DATETIME2) RETURNS INTEGER")
# The following two cases don't necessarily correspond to valid TSQL, but they are used to verify
# that the syntax RETURNS @return_variable TABLE <table_type_definition> ... is parsed correctly.
#
# See also "Transact-SQL Multi-Statement Table-Valued Function Syntax"
# https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql?view=sql-server-ver16
self.validate_identity(
"CREATE FUNCTION foo(@bar INTEGER) RETURNS @foo TABLE (x INTEGER, y NUMERIC) AS RETURN SELECT 1"
)
self.validate_identity(
"CREATE FUNCTION foo() RETURNS @contacts TABLE (first_name VARCHAR(50), phone VARCHAR(25)) AS SELECT @fname, @phone"
)
self.validate_all(
"""
CREATE FUNCTION udfProductInYear (
@model_year INT
)
RETURNS TABLE
AS
RETURN
SELECT
product_name,
model_year,
list_price
FROM
production.products
WHERE
model_year = @model_year
""",
write={
"tsql": """CREATE FUNCTION udfProductInYear(
@model_year INTEGER
)
RETURNS TABLE AS
RETURN SELECT
product_name,
model_year,
list_price
FROM production.products
WHERE
model_year = @model_year""",
},
pretty=True,
)
sql = """
CREATE procedure [TRANSF].[SP_Merge_Sales_Real]
@Loadid INTEGER
,@NumberOfRows INTEGER
AS
BEGIN
SET XACT_ABORT ON;
DECLARE @DWH_DateCreated DATETIME = CONVERT(DATETIME, getdate(), 104);
DECLARE @DWH_DateModified DATETIME = CONVERT(DATETIME, getdate(), 104);
DECLARE @DWH_IdUserCreated INTEGER = SUSER_ID (SYSTEM_USER);
DECLARE @DWH_IdUserModified INTEGER = SUSER_ID (SYSTEM_USER);
DECLARE @SalesAmountBefore float;
SELECT @SalesAmountBefore=SUM(SalesAmount) FROM TRANSF.[Pre_Merge_Sales_Real] S;
END
"""
expected_sqls = [
'CREATE PROCEDURE "TRANSF"."SP_Merge_Sales_Real" @Loadid INTEGER, @NumberOfRows INTEGER AS BEGIN SET XACT_ABORT ON',
"DECLARE @DWH_DateCreated DATETIME = CONVERT(DATETIME, getdate(), 104)",
"DECLARE @DWH_DateModified DATETIME = CONVERT(DATETIME, getdate(), 104)",
"DECLARE @DWH_IdUserCreated INTEGER = SUSER_ID (SYSTEM_USER)",
"DECLARE @DWH_IdUserModified INTEGER = SUSER_ID (SYSTEM_USER)",
"DECLARE @SalesAmountBefore float",
'SELECT @SalesAmountBefore = SUM(SalesAmount) FROM TRANSF."Pre_Merge_Sales_Real" AS S',
"END",
]
for expr, expected_sql in zip(parse(sql, read="tsql"), expected_sqls):
self.assertEqual(expr.sql(dialect="tsql"), expected_sql)
def test_charindex(self):
self.validate_all(
"CHARINDEX(x, y, 9)",
@@ -472,3 +566,51 @@ class TestTSQL(Validator):
"EOMONTH(GETDATE(), -1)",
write={"spark": "LAST_DAY(ADD_MONTHS(CURRENT_TIMESTAMP(), -1))"},
)
def test_variables(self):
# In TSQL @, # can be used as a prefix for variables/identifiers
expr = parse_one("@x", read="tsql")
self.assertIsInstance(expr, exp.Column)
self.assertIsInstance(expr.this, exp.Identifier)
expr = parse_one("#x", read="tsql")
self.assertIsInstance(expr, exp.Column)
self.assertIsInstance(expr.this, exp.Identifier)
def test_system_time(self):
self.validate_all(
"SELECT [x] FROM [a].[b] FOR SYSTEM_TIME AS OF 'foo'",
write={
"tsql": """SELECT "x" FROM "a"."b" FOR SYSTEM_TIME AS OF 'foo'""",
},
)
self.validate_all(
"SELECT [x] FROM [a].[b] FOR SYSTEM_TIME AS OF 'foo' AS alias",
write={
"tsql": """SELECT "x" FROM "a"."b" FOR SYSTEM_TIME AS OF 'foo' AS alias""",
},
)
self.validate_all(
"SELECT [x] FROM [a].[b] FOR SYSTEM_TIME FROM c TO d",
write={
"tsql": """SELECT "x" FROM "a"."b" FOR SYSTEM_TIME FROM c TO d""",
},
)
self.validate_all(
"SELECT [x] FROM [a].[b] FOR SYSTEM_TIME BETWEEN c AND d",
write={
"tsql": """SELECT "x" FROM "a"."b" FOR SYSTEM_TIME BETWEEN c AND d""",
},
)
self.validate_all(
"SELECT [x] FROM [a].[b] FOR SYSTEM_TIME CONTAINED IN (c, d)",
write={
"tsql": """SELECT "x" FROM "a"."b" FOR SYSTEM_TIME CONTAINED IN (c, d)""",
},
)
self.validate_all(
"SELECT [x] FROM [a].[b] FOR SYSTEM_TIME ALL AS alias",
write={
"tsql": """SELECT "x" FROM "a"."b" FOR SYSTEM_TIME ALL AS alias""",
},
)
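A minimal sketch of the new T-SQL behaviour covered in this file, assuming sqlglot 10.5.10:

import sqlglot
from sqlglot import exp, parse_one

# @- and #-prefixed names parse as ordinary column identifiers and round-trip unchanged.
expr = parse_one("@x", read="tsql")
print(isinstance(expr, exp.Column), expr.sql(dialect="tsql"))  # -> True @x

# Bracket-quoted identifiers and FOR SYSTEM_TIME clauses are preserved on output.
print(sqlglot.transpile("SELECT [x] FROM [a].[b] FOR SYSTEM_TIME AS OF 'foo'", read="tsql", write="tsql")[0])
# -> SELECT "x" FROM "a"."b" FOR SYSTEM_TIME AS OF 'foo'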


@@ -94,8 +94,8 @@ CONCAT_WS('-', 'a', 'b')
CONCAT_WS('-', 'a', 'b', 'c')
POSEXPLODE("x") AS ("a", "b")
POSEXPLODE("x") AS ("a", "b", "c")
STR_POSITION(x, 'a')
STR_POSITION(x, 'a', 3)
STR_POSITION(haystack, needle)
STR_POSITION(haystack, needle, pos)
LEVENSHTEIN('gumbo', 'gambol', 2, 1, 1)
SPLIT(SPLIT(referrer, 'utm_source=')[OFFSET(1)], "&")[OFFSET(0)]
x[ORDINAL(1)][SAFE_OFFSET(2)]
@@ -375,12 +375,16 @@ SELECT * FROM (SELECT 1 UNION ALL SELECT 2) AS x
SELECT * FROM (SELECT 1 UNION ALL SELECT 2)
SELECT * FROM ((SELECT 1) AS a UNION ALL (SELECT 2) AS b)
SELECT * FROM ((SELECT 1) AS a(b))
SELECT * FROM ((SELECT 1) UNION (SELECT 2) UNION (SELECT 3))
SELECT * FROM x AS y(a, b)
SELECT * EXCEPT (a, b)
SELECT * EXCEPT (a, b) FROM y
SELECT * REPLACE (a AS b, b AS C)
SELECT * REPLACE (a + 1 AS b, b AS C)
SELECT * EXCEPT (a, b) REPLACE (a AS b, b AS C)
SELECT * EXCEPT (a, b) REPLACE (a AS b, b AS C) FROM y
SELECT a.* EXCEPT (a, b), b.* REPLACE (a AS b, b AS C)
SELECT a.* EXCEPT (a, b), b.* REPLACE (a AS b, b AS C) FROM x
SELECT zoo, animals FROM (VALUES ('oakland', ARRAY('a', 'b')), ('sf', ARRAY('b', 'c'))) AS t(zoo, animals)
SELECT zoo, animals FROM UNNEST(ARRAY(STRUCT('oakland' AS zoo, ARRAY('a', 'b') AS animals), STRUCT('sf' AS zoo, ARRAY('b', 'c') AS animals))) AS t(zoo, animals)
WITH a AS (SELECT 1) SELECT 1 UNION ALL SELECT 2
@@ -438,6 +442,8 @@ SELECT SUM(x) OVER (PARTITION BY a RANGE BETWEEN 1 FOLLOWING AND UNBOUNDED FOLLO
SELECT AVG(x) OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) FROM t
SELECT LISTAGG(x) WITHIN GROUP (ORDER BY x) AS y
SELECT LISTAGG(x) WITHIN GROUP (ORDER BY x DESC)
SELECT PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x)
SELECT PERCENTILE_DISC(0.5) WITHIN GROUP (ORDER BY x)
SELECT SUM(x) FILTER(WHERE x > 1)
SELECT SUM(x) FILTER(WHERE x > 1) OVER (ORDER BY y)
SELECT COUNT(DISTINCT a) OVER (PARTITION BY c ORDER BY d ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING)
@@ -611,6 +617,7 @@ WITH a AS (SELECT * FROM b) DELETE FROM a
WITH a AS (SELECT * FROM b) CACHE TABLE a
SELECT ? AS ? FROM x WHERE b BETWEEN ? AND ? GROUP BY ?, 1 LIMIT ?
SELECT :hello, ? FROM x LIMIT :my_limit
SELECT * FROM x FETCH NEXT @take ROWS ONLY OFFSET @skip
WITH a AS ((SELECT b.foo AS foo, b.bar AS bar FROM b) UNION ALL (SELECT c.foo AS foo, c.bar AS bar FROM c)) SELECT * FROM a
WITH a AS ((SELECT 1 AS b) UNION ALL (SELECT 1 AS b)) SELECT * FROM a
SELECT (WITH x AS (SELECT 1 AS y) SELECT * FROM x) AS z
@@ -670,3 +677,17 @@ CREATE TABLE products (x INT GENERATED ALWAYS AS IDENTITY)
CREATE TABLE IF NOT EXISTS customer (pk BIGINT NOT NULL GENERATED ALWAYS AS IDENTITY (INCREMENT BY 1))
CREATE TABLE customer (pk BIGINT NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 10 INCREMENT BY 1))
CREATE TABLE customer (pk BIGINT NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 10))
ALTER TABLE "schema"."tablename" ADD CONSTRAINT "CHK_Name" CHECK (NOT "IdDwh" IS NULL AND "IdDwh" <> (0))
ALTER TABLE persons ADD CONSTRAINT persons_pk PRIMARY KEY (first_name, last_name)
ALTER TABLE pets ADD CONSTRAINT pets_persons_fk FOREIGN KEY (owner_first_name, owner_last_name) REFERENCES persons
ALTER TABLE pets ADD CONSTRAINT pets_name_not_cute_chk CHECK (LENGTH(name) < 20)
ALTER TABLE people10m ADD CONSTRAINT dateWithinRange CHECK (birthDate > '1900-01-01')
ALTER TABLE people10m ADD CONSTRAINT validIds CHECK (id > 1 AND id < 99999999) ENFORCED
ALTER TABLE baa ADD CONSTRAINT boo PRIMARY KEY (x, y) NOT ENFORCED DEFERRABLE INITIALLY DEFERRED NORELY
ALTER TABLE baa ADD CONSTRAINT boo PRIMARY KEY (x, y) NOT ENFORCED DEFERRABLE INITIALLY DEFERRED NORELY
ALTER TABLE baa ADD CONSTRAINT boo FOREIGN KEY (x, y) REFERENCES persons ON UPDATE NO ACTION ON DELETE NO ACTION MATCH FULL
ALTER TABLE a ADD PRIMARY KEY (x, y) NOT ENFORCED
ALTER TABLE a ADD FOREIGN KEY (x, y) REFERENCES bla
CREATE TABLE foo (baz_id INT REFERENCES baz(id) DEFERRABLE)
SELECT end FROM a
SELECT id FROM b.a AS a QUALIFY ROW_NUMBER() OVER (PARTITION BY br ORDER BY sadf DESC) = 1


@@ -18,3 +18,6 @@ WITH y AS (SELECT *) SELECT * FROM x AS x;
WITH y AS (SELECT * FROM y AS y2 JOIN x AS z2) SELECT * FROM x AS x JOIN y as y;
WITH y AS (SELECT * FROM (SELECT * FROM y AS y) AS y2 JOIN (SELECT * FROM x AS x) AS z2) SELECT * FROM (SELECT * FROM x AS x) AS x JOIN y AS y;
SELECT * FROM x AS x JOIN xx AS y;
SELECT * FROM (SELECT * FROM x AS x) AS x JOIN xx AS y;


@@ -2,7 +2,7 @@ SELECT a FROM (SELECT * FROM x);
SELECT _q_0.a AS a FROM (SELECT x.a AS a FROM x AS x) AS _q_0;
SELECT 1 FROM (SELECT * FROM x) WHERE b = 2;
SELECT 1 AS _col_0 FROM (SELECT x.b AS b FROM x AS x) AS _q_0 WHERE _q_0.b = 2;
SELECT 1 AS "1" FROM (SELECT x.b AS b FROM x AS x) AS _q_0 WHERE _q_0.b = 2;
SELECT (SELECT c FROM y WHERE q.b = y.b) FROM (SELECT * FROM x) AS q;
SELECT (SELECT y.c AS c FROM y AS y WHERE q.b = y.b) AS _col_0 FROM (SELECT x.b AS b FROM x AS x) AS q;


@@ -4,6 +4,14 @@
SELECT a FROM x;
SELECT x.a AS a FROM x AS x;
# execute: false
SELECT a FROM zz GROUP BY a ORDER BY a;
SELECT zz.a AS a FROM zz AS zz GROUP BY zz.a ORDER BY a;
# execute: false
SELECT x, p FROM (SELECT x from xx) xx CROSS JOIN yy;
SELECT xx.x AS x, yy.p AS p FROM (SELECT xx.x AS x FROM xx AS xx) AS xx CROSS JOIN yy AS yy;
SELECT a FROM x AS z;
SELECT z.a AS a FROM x AS z;
@@ -20,8 +28,8 @@ SELECT a AS b FROM x;
SELECT x.a AS b FROM x AS x;
# execute: false
SELECT 1, 2 FROM x;
SELECT 1 AS _col_0, 2 AS _col_1 FROM x AS x;
SELECT 1, 2 + 3 FROM x;
SELECT 1 AS "1", 2 + 3 AS _col_1 FROM x AS x;
# execute: false
SELECT a + b FROM x;
@@ -57,6 +65,10 @@ SELECT x.a AS j, x.b AS a FROM x AS x ORDER BY x.a;
SELECT SUM(a) AS c, SUM(b) AS d FROM x ORDER BY 1, 2;
SELECT SUM(x.a) AS c, SUM(x.b) AS d FROM x AS x ORDER BY SUM(x.a), SUM(x.b);
# execute: false
SELECT CAST(a AS INT) FROM x ORDER BY a;
SELECT CAST(x.a AS INT) AS a FROM x AS x ORDER BY a;
# execute: false
SELECT SUM(a), SUM(b) AS c FROM x ORDER BY 1, 2;
SELECT SUM(x.a) AS _col_0, SUM(x.b) AS c FROM x AS x ORDER BY SUM(x.a), SUM(x.b);
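A minimal sketch of the qualify_columns change reflected in this fixture, assuming sqlglot 10.5.10 (the schema below is only an illustrative stand-in for the test schema):

from sqlglot import parse_one
from sqlglot.optimizer.qualify_columns import qualify_columns

# Unaliased literal projections are now named after their own text (1 AS "1"),
# while other unaliased expressions still receive positional _col_N names.
print(qualify_columns(parse_one("SELECT 1, 2 + 3 FROM x"), schema={"x": {"a": "INT"}}).sql())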


@@ -1,4 +1,3 @@
SELECT a FROM zz;
SELECT * FROM zz;
SELECT z.a FROM x;
SELECT z.* FROM x;
@@ -11,3 +10,4 @@ SELECT q.a FROM (SELECT x.b FROM x) AS z JOIN (SELECT a FROM z) AS q ON z.b = q.
SELECT b FROM x AS a CROSS JOIN y AS b CROSS JOIN y AS c;
SELECT x.a FROM x JOIN y USING (a);
SELECT a, SUM(b) FROM x GROUP BY 3;
SELECT p FROM (SELECT x from xx) y CROSS JOIN yy CROSS JOIN zz


@@ -481,11 +481,11 @@ class TestExecutor(unittest.TestCase):
def test_static_queries(self):
for sql, cols, rows in [
("SELECT 1", ["_col_0"], [(1,)]),
("SELECT 1", ["1"], [(1,)]),
("SELECT 1 + 2 AS x", ["x"], [(3,)]),
("SELECT CONCAT('a', 'b') AS x", ["x"], [("ab",)]),
("SELECT 1 AS x, 2 AS y", ["x", "y"], [(1, 2)]),
("SELECT 'foo' LIMIT 1", ["_col_0"], [("foo",)]),
("SELECT 'foo' LIMIT 1", ["foo"], [("foo",)]),
(
"SELECT SUM(x), COUNT(x) FROM (SELECT 1 AS x WHERE FALSE)",
["_col_0", "_col_1"],

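A minimal sketch of the executor's new column naming asserted above, assuming sqlglot 10.5.10:

from sqlglot.executor import execute

# Literal-only projections are named after the literal itself rather than _col_N.
print(execute("SELECT 1").columns)              # -> ('1',)
print(execute("SELECT 'foo' LIMIT 1").columns)  # -> ('foo',)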

@@ -189,6 +189,27 @@ class TestExpressions(unittest.TestCase):
"SELECT * FROM (SELECT a FROM tbl1) WHERE b > 100",
)
def test_function_building(self):
self.assertEqual(exp.func("bla", 1, "foo").sql(), "BLA(1, 'foo')")
self.assertEqual(exp.func("COUNT", exp.Star()).sql(), "COUNT(*)")
self.assertEqual(exp.func("bloo").sql(), "BLOO()")
self.assertEqual(
exp.func("locate", "x", "xo", dialect="hive").sql("hive"), "LOCATE('x', 'xo')"
)
self.assertIsInstance(exp.func("instr", "x", "b", dialect="mysql"), exp.StrPosition)
self.assertIsInstance(exp.func("bla", 1, "foo"), exp.Anonymous)
self.assertIsInstance(
exp.func("cast", this=exp.Literal.number(5), to=exp.DataType.build("DOUBLE")),
exp.Cast,
)
with self.assertRaises(ValueError):
exp.func("some_func", 1, arg2="foo")
with self.assertRaises(ValueError):
exp.func("abs")
def test_named_selects(self):
expression = parse_one(
"SELECT a, b AS B, c + d AS e, *, 'zz', 'zz' AS z FROM foo as bar, baz"

tests/test_lineage.py (new file)

@@ -0,0 +1,20 @@
import unittest
from sqlglot.lineage import lineage
class TestLineage(unittest.TestCase):
maxDiff = None
def test_lineage(self) -> None:
node = lineage(
"a",
"SELECT a FROM y",
schema={"x": {"a": "int"}},
sources={"y": "SELECT * FROM x"},
)
self.assertEqual(
node.source.sql(),
"SELECT y.a AS a FROM (SELECT x.a AS a FROM x AS x) AS y /* source: y */",
)
self.assertGreater(len(node.to_html()._repr_html_()), 1000)


@@ -117,6 +117,7 @@ class TestOptimizer(unittest.TestCase):
self.check_file(
"isolate_table_selects",
optimizer.isolate_table_selects.isolate_table_selects,
schema=self.schema,
)
def test_qualify_tables(self):


@@ -17,6 +17,11 @@ class TestSchema(unittest.TestCase):
with self.assertRaises(SchemaError):
schema.column_names(to_table(table))
def assert_column_names_empty(self, schema, *tables):
for table in tables:
with self.subTest(table):
self.assertEqual(schema.column_names(to_table(table)), [])
def test_schema(self):
schema = ensure_schema(
{
@@ -38,7 +43,7 @@ class TestSchema(unittest.TestCase):
("z.x.y", ["b", "c"]),
)
self.assert_column_names_raises(
self.assert_column_names_empty(
schema,
"z",
"z.z",
@@ -76,6 +81,10 @@ class TestSchema(unittest.TestCase):
self.assert_column_names_raises(
schema,
"x",
)
self.assert_column_names_empty(
schema,
"z.x",
"z.y",
)
@@ -129,12 +138,16 @@ class TestSchema(unittest.TestCase):
self.assert_column_names_raises(
schema,
"q",
"d2.x",
"y",
"z",
"d1.y",
"d1.z",
)
self.assert_column_names_empty(
schema,
"q",
"d2.x",
"a.b.c",
)