Merging upstream version 11.0.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent fdac67ef7f
commit ba0f3f0bfa
112 changed files with 126100 additions and 230 deletions

tests/dialects/test_databricks.py

@@ -4,6 +4,9 @@ from tests.dialects.test_dialect import Validator
class TestDatabricks(Validator):
    dialect = "databricks"

+    def test_databricks(self):
+        self.validate_identity("CREATE FUNCTION a.b(x INT) RETURNS INT RETURN x + 1")
+
    def test_datediff(self):
        self.validate_all(
            "SELECT DATEDIFF(year, 'start', 'end')",
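
For illustration, a minimal sketch of what the new validate_identity call asserts, written against sqlglot's public transpile API rather than the test harness:

import sqlglot

# The new Databricks scalar SQL function syntax should round-trip: parsing and
# re-generating with the databricks dialect leaves the statement unchanged.
sql = "CREATE FUNCTION a.b(x INT) RETURNS INT RETURN x + 1"
assert sqlglot.transpile(sql, read="databricks", write="databricks")[0] == sql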

tests/dialects/test_dialect.py

@@ -508,7 +508,7 @@ class TestDialect(Validator):
            },
            write={
                "bigquery": "DATE_ADD(x, INTERVAL 1 'day')",
-                "drill": "DATE_ADD(x, INTERVAL '1' DAY)",
+                "drill": "DATE_ADD(x, INTERVAL 1 DAY)",
                "duckdb": "x + INTERVAL 1 day",
                "hive": "DATE_ADD(x, 1)",
                "mysql": "DATE_ADD(x, INTERVAL 1 DAY)",

@@ -525,7 +525,7 @@ class TestDialect(Validator):
            "DATE_ADD(x, 1)",
            write={
                "bigquery": "DATE_ADD(x, INTERVAL 1 'day')",
-                "drill": "DATE_ADD(x, INTERVAL '1' DAY)",
+                "drill": "DATE_ADD(x, INTERVAL 1 DAY)",
                "duckdb": "x + INTERVAL 1 DAY",
                "hive": "DATE_ADD(x, 1)",
                "mysql": "DATE_ADD(x, INTERVAL 1 DAY)",

@@ -628,7 +628,7 @@ class TestDialect(Validator):
        self.validate_all(
            "TS_OR_DS_ADD('2021-02-01', 1, 'DAY')",
            write={
-                "drill": "DATE_ADD(CAST('2021-02-01' AS DATE), INTERVAL '1' DAY)",
+                "drill": "DATE_ADD(CAST('2021-02-01' AS DATE), INTERVAL 1 DAY)",
                "duckdb": "CAST('2021-02-01' AS DATE) + INTERVAL 1 DAY",
                "hive": "DATE_ADD('2021-02-01', 1)",
                "presto": "DATE_ADD('DAY', 1, DATE_PARSE(SUBSTR('2021-02-01', 1, 10), '%Y-%m-%d'))",

@@ -638,7 +638,7 @@ class TestDialect(Validator):
        self.validate_all(
            "DATE_ADD(CAST('2020-01-01' AS DATE), 1)",
            write={
-                "drill": "DATE_ADD(CAST('2020-01-01' AS DATE), INTERVAL '1' DAY)",
+                "drill": "DATE_ADD(CAST('2020-01-01' AS DATE), INTERVAL 1 DAY)",
                "duckdb": "CAST('2020-01-01' AS DATE) + INTERVAL 1 DAY",
                "hive": "DATE_ADD(CAST('2020-01-01' AS DATE), 1)",
                "presto": "DATE_ADD('day', 1, CAST('2020-01-01' AS DATE))",
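
For reference, a sketch of the Drill behaviour these updated expectations describe; the expected string is copied from the test lines above, not independently verified:

import sqlglot

# A generic DATE_ADD written to the Drill dialect; per the updated test the
# interval quantity is rendered unquoted (INTERVAL 1 DAY, not INTERVAL '1' DAY).
print(sqlglot.transpile("DATE_ADD(x, 1)", write="drill")[0])
# expected per the test: DATE_ADD(x, INTERVAL 1 DAY)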

tests/dialects/test_duckdb.py

@@ -343,6 +343,10 @@ class TestDuckDB(Validator):
            },
        )

+        self.validate_all(
+            "CAST(x AS DATE) + INTERVAL (7 * -1) DAY", read={"spark": "DATE_SUB(x, 7)"}
+        )
+
    def test_bool_or(self):
        self.validate_all(
            "SELECT a, LOGICAL_OR(b) FROM table GROUP BY a",
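
A sketch of the new cross-dialect case above (Spark in, DuckDB out); the expected output is taken from the test line, not re-verified:

import sqlglot

# Spark's DATE_SUB(x, 7) becomes DuckDB date arithmetic, with the negated
# quantity kept parenthesised inside the interval.
print(sqlglot.transpile("DATE_SUB(x, 7)", read="spark", write="duckdb")[0])
# expected per the test: CAST(x AS DATE) + INTERVAL (7 * -1) DAY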

tests/dialects/test_hive.py

@@ -287,7 +287,7 @@ class TestHive(Validator):
        self.validate_all(
            "DATE_SUB('2020-01-01', 1)",
            write={
-                "duckdb": "CAST('2020-01-01' AS DATE) + INTERVAL 1 * -1 DAY",
+                "duckdb": "CAST('2020-01-01' AS DATE) + INTERVAL (1 * -1) DAY",
                "presto": "DATE_ADD('DAY', 1 * -1, DATE_PARSE(SUBSTR('2020-01-01', 1, 10), '%Y-%m-%d'))",
                "hive": "DATE_ADD('2020-01-01', 1 * -1)",
                "spark": "DATE_ADD('2020-01-01', 1 * -1)",

tests/dialects/test_snowflake.py

@@ -6,6 +6,43 @@ class TestSnowflake(Validator):
    dialect = "snowflake"

    def test_snowflake(self):
        self.validate_identity("SELECT REGEXP_LIKE(a, b, c)")
        self.validate_identity("PUT file:///dir/tmp.csv @%table")
        self.validate_identity("CREATE TABLE foo (bar FLOAT AUTOINCREMENT START 0 INCREMENT 1)")
        self.validate_identity(
            'COPY INTO NEW_TABLE ("foo", "bar") FROM (SELECT $1, $2, $3, $4 FROM @%old_table)'
        )

        self.validate_all(
            "CREATE OR REPLACE TEMPORARY TABLE x (y NUMBER IDENTITY(0, 1))",
            write={
                "snowflake": "CREATE OR REPLACE TEMPORARY TABLE x (y DECIMAL AUTOINCREMENT START 0 INCREMENT 1)",
            },
        )
        self.validate_all(
            "CREATE TEMPORARY TABLE x (y NUMBER AUTOINCREMENT(0, 1))",
            write={
                "snowflake": "CREATE TEMPORARY TABLE x (y DECIMAL AUTOINCREMENT START 0 INCREMENT 1)",
            },
        )
        self.validate_all(
            "CREATE TABLE x (y NUMBER IDENTITY START 0 INCREMENT 1)",
            write={
                "snowflake": "CREATE TABLE x (y DECIMAL AUTOINCREMENT START 0 INCREMENT 1)",
            },
        )
        self.validate_all(
            "ALTER TABLE foo ADD COLUMN id INT identity(1, 1)",
            write={
                "snowflake": "ALTER TABLE foo ADD COLUMN id INT AUTOINCREMENT START 1 INCREMENT 1",
            },
        )
        self.validate_all(
            "SELECT DAYOFWEEK('2016-01-02T23:39:20.123-07:00'::TIMESTAMP)",
            write={
                "snowflake": "SELECT DAYOFWEEK(CAST('2016-01-02T23:39:20.123-07:00' AS TIMESTAMPNTZ))",
            },
        )
        self.validate_all(
            "SELECT * FROM xxx WHERE col ilike '%Don''t%'",
            write={
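
A sketch of the Snowflake column-property normalisation these new cases cover, using the public transpile API; the expected string is copied from the test above:

import sqlglot

# Snowflake IDENTITY column properties are rewritten to the equivalent
# AUTOINCREMENT START ... INCREMENT ... form, and NUMBER is generated as DECIMAL.
sql = "CREATE TABLE x (y NUMBER IDENTITY START 0 INCREMENT 1)"
print(sqlglot.transpile(sql, read="snowflake", write="snowflake")[0])
# expected per the test: CREATE TABLE x (y DECIMAL AUTOINCREMENT START 0 INCREMENT 1)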

@@ -165,10 +202,10 @@ class TestSnowflake(Validator):
        self.validate_all(
            r"SELECT $$a ' \ \t \x21 z $ $$",
            write={
-                "snowflake": r"SELECT 'a \' \\ \\t \\x21 z $ '",
+                "snowflake": r"SELECT 'a \' \\ \t \\x21 z $ '",
            },
        )
        self.validate_identity("SELECT REGEXP_LIKE(a, b, c)")
        self.validate_identity(r"REGEXP_REPLACE('target', 'pattern', '\n')")
        self.validate_all(
            "SELECT RLIKE(a, b)",
            write={

@@ -253,6 +290,8 @@ class TestSnowflake(Validator):
        )

    def test_timestamps(self):
+        self.validate_identity("SELECT EXTRACT(month FROM a)")
+
        self.validate_all(
            "SELECT CAST(a AS TIMESTAMP)",
            write={

@@ -277,7 +316,6 @@ class TestSnowflake(Validator):
                "snowflake": "SELECT CAST(a AS TIMESTAMPLTZ)",
            },
        )
-        self.validate_identity("SELECT EXTRACT(month FROM a)")
        self.validate_all(
            "SELECT EXTRACT('month', a)",
            write={

@@ -313,6 +351,8 @@ class TestSnowflake(Validator):

    def test_semi_structured_types(self):
        self.validate_identity("SELECT CAST(a AS VARIANT)")
+        self.validate_identity("SELECT CAST(a AS ARRAY)")
+
        self.validate_all(
            "SELECT a::VARIANT",
            write={

@@ -320,7 +360,6 @@ class TestSnowflake(Validator):
                "tsql": "SELECT CAST(a AS SQL_VARIANT)",
            },
        )
-        self.validate_identity("SELECT CAST(a AS ARRAY)")
        self.validate_all(
            "ARRAY_CONSTRUCT(0, 1, 2)",
            write={

@@ -343,6 +382,7 @@ class TestSnowflake(Validator):
            "CREATE TABLE a (x DATE, y BIGINT) WITH (PARTITION BY (x), integration='q', auto_refresh=TRUE, file_format=(type = parquet))"
        )
        self.validate_identity("CREATE MATERIALIZED VIEW a COMMENT='...' AS SELECT 1 FROM x")

        self.validate_all(
            "CREATE OR REPLACE TRANSIENT TABLE a (id INT)",
            read={

tests/fixtures/identity.sql (vendored, 12 lines changed)

@@ -17,6 +17,7 @@ SUM(CASE WHEN x > 1 THEN 1 ELSE 0 END) / y
'\x'
"x"
""
"""x"""
N'abc'
x
x % 1

@@ -101,6 +102,8 @@ SPLIT(SPLIT(referrer, 'utm_source=')[OFFSET(1)], "&")[OFFSET(0)]
x[ORDINAL(1)][SAFE_OFFSET(2)]
x GLOB '??-*'
x GLOB y
LIKE(x, 'z')
ILIKE(x, 'z')
x LIKE SUBSTR('abc', 1, 1)
x LIKE y
x LIKE a.y

@@ -113,7 +116,7 @@ INTERVAL '1' day
INTERVAL '1' MONTH
INTERVAL '1 day'
INTERVAL 2 months
-INTERVAL 1 + 3 DAYS
+INTERVAL (1 + 3) DAYS
CAST('45' AS INTERVAL DAYS)
TIMESTAMP_DIFF(CURRENT_TIMESTAMP(), 1, DAY)
DATETIME_DIFF(CURRENT_DATE, 1, DAY)

@@ -221,6 +224,7 @@ SELECT JSON_EXTRACT(x, '$.name')
SELECT JSON_EXTRACT_SCALAR(x, '$.name')
SELECT x LIKE '%x%' FROM test
SELECT * FROM test LIMIT 100
SELECT * FROM test LIMIT 1 + 1
SELECT * FROM test LIMIT 100 OFFSET 200
SELECT * FROM test FETCH FIRST ROWS ONLY
SELECT * FROM test FETCH FIRST 1 ROWS ONLY

@@ -393,6 +397,7 @@ SELECT * EXCEPT (a, b) REPLACE (a AS b, b AS C)
SELECT * EXCEPT (a, b) REPLACE (a AS b, b AS C) FROM y
SELECT a.* EXCEPT (a, b), b.* REPLACE (a AS b, b AS C)
SELECT a.* EXCEPT (a, b), b.* REPLACE (a AS b, b AS C) FROM x
SELECT A.* EXCEPT (A.COL_1) FROM TABLE_1 AS A
SELECT zoo, animals FROM (VALUES ('oakland', ARRAY('a', 'b')), ('sf', ARRAY('b', 'c'))) AS t(zoo, animals)
SELECT zoo, animals FROM UNNEST(ARRAY(STRUCT('oakland' AS zoo, ARRAY('a', 'b') AS animals), STRUCT('sf' AS zoo, ARRAY('b', 'c') AS animals))) AS t(zoo, animals)
WITH a AS (SELECT 1) SELECT 1 UNION ALL SELECT 2

@@ -558,6 +563,7 @@ CREATE FUNCTION f AS 'g'
CREATE FUNCTION a(b INT, c VARCHAR) AS 'SELECT 1'
CREATE FUNCTION a() LANGUAGE sql
CREATE FUNCTION a() LANGUAGE sql RETURNS INT
CREATE FUNCTION a.b(x INT) RETURNS INT AS RETURN x + 1
CREATE FUNCTION a.b.c()
CREATE INDEX abc ON t (a)
CREATE INDEX abc ON t (a, b, b)

@@ -585,6 +591,7 @@ INSERT OVERWRITE TABLE a.b PARTITION(ds) SELECT x FROM y
INSERT OVERWRITE TABLE a.b PARTITION(ds = 'YYYY-MM-DD') SELECT x FROM y
INSERT OVERWRITE TABLE a.b PARTITION(ds, hour) SELECT x FROM y
INSERT OVERWRITE TABLE a.b PARTITION(ds = 'YYYY-MM-DD', hour = 'hh') SELECT x FROM y
ALTER SESSION SET STATEMENT_TIMEOUT_IN_SECONDS=3
ALTER AGGREGATE bla(foo) OWNER TO CURRENT_USER
ALTER RULE foo ON bla RENAME TO baz
ALTER ROLE CURRENT_USER WITH REPLICATION

@@ -721,3 +728,6 @@ ALTER TABLE a ADD PRIMARY KEY (x, y) NOT ENFORCED
ALTER TABLE a ADD FOREIGN KEY (x, y) REFERENCES bla
SELECT end FROM a
SELECT id FROM b.a AS a QUALIFY ROW_NUMBER() OVER (PARTITION BY br ORDER BY sadf DESC) = 1
SELECT LEFT.FOO FROM BLA AS LEFT
SELECT RIGHT.FOO FROM BLA AS RIGHT
SELECT LEFT FROM LEFT LEFT JOIN RIGHT RIGHT JOIN LEFT
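
Each line of identity.sql is used as a round-trip fixture: parsing with the default dialect and regenerating must reproduce the line verbatim. A minimal sketch for one of the added lines:

import sqlglot

# The new CREATE FUNCTION ... RETURNS INT AS RETURN form must survive a
# parse/generate round trip unchanged.
sql = "CREATE FUNCTION a.b(x INT) RETURNS INT AS RETURN x + 1"
assert sqlglot.transpile(sql)[0] == sql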

tests/fixtures/optimizer/expand_laterals.sql (vendored, new file, 40 lines)

@@ -0,0 +1,40 @@
# title: expand alias reference
SELECT
  x.a + 1 AS i,
  i + 1 AS j,
  j + 1 AS k
FROM x;
SELECT
  x.a + 1 AS i,
  x.a + 1 + 1 AS j,
  x.a + 1 + 1 + 1 AS k
FROM x;

# title: noop - reference comes before alias
SELECT
  b + 1 AS j,
  x.a + 1 AS i
FROM x;
SELECT
  b + 1 AS j,
  x.a + 1 AS i
FROM x;


# title: subquery
SELECT
  *
FROM (
  SELECT
    x.a + 1 AS i,
    i + 1 AS j
  FROM x
);
SELECT
  *
FROM (
  SELECT
    x.a + 1 AS i,
    x.a + 1 + 1 AS j
  FROM x
);
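
The fixture above exercises the new expand_laterals optimizer rule (wired up in tests/test_optimizer.py below). A hedged sketch of calling it directly, assuming the rule follows the usual sqlglot pattern of returning the rewritten expression:

import sqlglot
from sqlglot import optimizer

# A lateral column alias reference (an alias reused by a later projection in
# the same SELECT) is expanded into the expression it aliases.
expression = sqlglot.parse_one("SELECT x.a + 1 AS i, i + 1 AS j FROM x")
print(optimizer.expand_laterals.expand_laterals(expression).sql())
# per the first fixture pair (shown pretty-printed above):
# SELECT x.a + 1 AS i, x.a + 1 + 1 AS j FROM x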

tests/fixtures/optimizer/optimizer.sql (vendored, 23 lines changed)

@@ -1,14 +1,20 @@
# title: lateral
# execute: false
SELECT a, m FROM z LATERAL VIEW EXPLODE([1, 2]) q AS m;
WITH "z_2" AS (
  SELECT
    "z"."a" AS "a"
  FROM "z" AS "z"
)
SELECT
  "z"."a" AS "a",
  "q"."m" AS "m"
-FROM "z" AS "z"
+FROM "z_2" AS "z"
LATERAL VIEW
  EXPLODE(ARRAY(1, 2)) q AS "m";

# title: unnest
# execute: false
SELECT x FROM UNNEST([1, 2]) AS q(x, y);
SELECT
  "q"."x" AS "x"

@@ -369,3 +375,18 @@ SELECT
FROM "x" AS "x"
RIGHT JOIN "y_2" AS "y"
  ON "x"."a" = "y"."b";


# title: lateral column alias reference
SELECT x.a + 1 AS c, c + 1 AS d FROM x;
SELECT
  "x"."a" + 1 AS "c",
  "x"."a" + 2 AS "d"
FROM "x" AS "x";

# title: column reference takes priority over lateral column alias reference
SELECT x.a + 1 AS b, b + 1 AS c FROM x;
SELECT
  "x"."a" + 1 AS "b",
  "x"."b" + 1 AS "c"
FROM "x" AS "x";
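
The same lateral-alias handling seen through the full optimizer pipeline, matching the new fixture; the schema literal here is a hypothetical stand-in for the fixture's test schema:

import sqlglot
from sqlglot import optimizer

# After the alias is expanded, simplification folds x.a + 1 + 1 into x.a + 2.
expression = sqlglot.parse_one("SELECT x.a + 1 AS c, c + 1 AS d FROM x")
print(optimizer.optimize(expression, schema={"x": {"a": "INT"}}).sql())
# expected per the fixture: SELECT "x"."a" + 1 AS "c", "x"."a" + 2 AS "d" FROM "x" AS "x"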

tests/fixtures/optimizer/qualify_columns.sql (vendored, 57 lines changed)

@@ -104,14 +104,6 @@ SELECT x.a AS a FROM x AS x ORDER BY x.b;
SELECT SUM(a) AS a FROM x ORDER BY SUM(a);
SELECT SUM(x.a) AS a FROM x AS x ORDER BY SUM(x.a);

-# dialect: bigquery
-SELECT ROW_NUMBER() OVER (PARTITION BY a ORDER BY b) AS row_num FROM x QUALIFY row_num = 1;
-SELECT ROW_NUMBER() OVER (PARTITION BY x.a ORDER BY x.b) AS row_num FROM x AS x QUALIFY row_num = 1;
-
-# dialect: bigquery
-SELECT x.b, x.a FROM x LEFT JOIN y ON x.b = y.b QUALIFY ROW_NUMBER() OVER(PARTITION BY x.b ORDER BY x.a DESC) = 1;
-SELECT x.b AS b, x.a AS a FROM x AS x LEFT JOIN y AS y ON x.b = y.b QUALIFY ROW_NUMBER() OVER (PARTITION BY x.b ORDER BY x.a DESC) = 1;
-
# execute: false
SELECT AGGREGATE(ARRAY(a, x.b), 0, (x, acc) -> x + acc + a) AS sum_agg FROM x;
SELECT AGGREGATE(ARRAY(x.a, x.b), 0, (x, acc) -> x + acc + x.a) AS sum_agg FROM x AS x;

@@ -199,15 +191,6 @@ SELECT x.a AS a FROM x AS x WHERE x.b IN (SELECT x.b AS b FROM y AS x);
SELECT a FROM x AS i WHERE b IN (SELECT b FROM y AS j WHERE j.b IN (SELECT c FROM y AS k WHERE k.b = j.b));
SELECT i.a AS a FROM x AS i WHERE i.b IN (SELECT j.b AS b FROM y AS j WHERE j.b IN (SELECT k.c AS c FROM y AS k WHERE k.b = j.b));

-# execute: false
-# dialect: bigquery
-SELECT aa FROM x, UNNEST(a) AS aa;
-SELECT aa AS aa FROM x AS x, UNNEST(x.a) AS aa;
-
-# execute: false
-SELECT aa FROM x, UNNEST(a) AS t(aa);
-SELECT t.aa AS aa FROM x AS x, UNNEST(x.a) AS t(aa);
-
--------------------------------------
-- Expand *
--------------------------------------

@@ -302,3 +285,43 @@ SELECT COALESCE(x.b, y.b, z.b) AS b FROM x AS x JOIN y AS y ON x.b = y.b JOIN z
# dialect: spark
SELECT /*+ BROADCAST(y) */ x.b FROM x JOIN y ON x.b = y.b;
SELECT /*+ BROADCAST(y) */ x.b AS b FROM x AS x JOIN y AS y ON x.b = y.b;
+
+--------------------------------------
+-- UDTF
+--------------------------------------
+# execute: false
+SELECT c FROM x LATERAL VIEW EXPLODE (a) AS c;
+SELECT _q_0.c AS c FROM x AS x LATERAL VIEW EXPLODE(x.a) _q_0 AS c;
+
+# execute: false
+SELECT c FROM xx LATERAL VIEW EXPLODE (a) AS c;
+SELECT _q_0.c AS c FROM xx AS xx LATERAL VIEW EXPLODE(xx.a) _q_0 AS c;
+
+# execute: false
+SELECT c FROM x LATERAL VIEW EXPLODE (a) t AS c;
+SELECT t.c AS c FROM x AS x LATERAL VIEW EXPLODE(x.a) t AS c;
+
+# execute: false
+SELECT aa FROM x, UNNEST(a) AS t(aa);
+SELECT t.aa AS aa FROM x AS x, UNNEST(x.a) AS t(aa);
+
+# execute: false
+# dialect: bigquery
+SELECT aa FROM x, UNNEST(a) AS aa;
+SELECT aa AS aa FROM x AS x, UNNEST(x.a) AS aa;
+
+--------------------------------------
+-- Window functions
+--------------------------------------
+
+-- ORDER BY in window function
+SELECT a + 1 AS a, ROW_NUMBER() OVER (PARTITION BY b ORDER BY a) AS row_num FROM x;
+SELECT x.a + 1 AS a, ROW_NUMBER() OVER (PARTITION BY x.b ORDER BY x.a) AS row_num FROM x AS x;
+
+# dialect: bigquery
+SELECT ROW_NUMBER() OVER (PARTITION BY a ORDER BY b) AS row_num FROM x QUALIFY row_num = 1;
+SELECT ROW_NUMBER() OVER (PARTITION BY x.a ORDER BY x.b) AS row_num FROM x AS x QUALIFY row_num = 1;
+
+# dialect: bigquery
+SELECT x.b, x.a FROM x LEFT JOIN y ON x.b = y.b QUALIFY ROW_NUMBER() OVER(PARTITION BY x.b ORDER BY x.a DESC) = 1;
+SELECT x.b AS b, x.a AS a FROM x AS x LEFT JOIN y AS y ON x.b = y.b QUALIFY ROW_NUMBER() OVER (PARTITION BY x.b ORDER BY x.a DESC) = 1;

tests/fixtures/pretty.sql (vendored, 5 lines changed)

@@ -342,3 +342,8 @@ SELECT
  basket_index
FROM table_data
CROSS JOIN UNNEST(fruit_basket) AS fruit WITH OFFSET AS basket_index;
+SELECT A.* EXCEPT A.COL_1, A.COL_2 FROM TABLE_1 A;
+SELECT
+  A.*
+EXCEPT (A.COL_1, A.COL_2)
+FROM TABLE_1 AS A;
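
pretty.sql pairs each input statement with its expected pretty-printed form. A sketch for the newly added star-EXCEPT case; the expected output is copied from the fixture (which stores statements with a trailing semicolon separator):

import sqlglot

# Star EXCEPT without parentheses parses, and the pretty printer renders the
# canonical parenthesised form.
print(sqlglot.transpile("SELECT A.* EXCEPT A.COL_1, A.COL_2 FROM TABLE_1 A", pretty=True)[0])
# expected per the fixture:
# SELECT
#   A.*
# EXCEPT (A.COL_1, A.COL_2)
# FROM TABLE_1 AS A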

tests/test_expressions.py

@@ -122,6 +122,10 @@ class TestExpressions(unittest.TestCase):
            ["first", "second", "third"],
        )

+        self.assertEqual(parse_one("x.*").name, "*")
+        self.assertEqual(parse_one("NULL").name, "NULL")
+        self.assertEqual(parse_one("a.b.c").name, "c")
+
    def test_table_name(self):
        self.assertEqual(exp.table_name(parse_one("a", into=exp.Table)), "a")
        self.assertEqual(exp.table_name(parse_one("a.b", into=exp.Table)), "a.b")

tests/test_optimizer.py

@@ -163,7 +163,10 @@ class TestOptimizer(unittest.TestCase):
        for sql in load_sql_fixtures("optimizer/qualify_columns__invalid.sql"):
            with self.subTest(sql):
                with self.assertRaises((OptimizeError, SchemaError)):
-                    optimizer.qualify_columns.qualify_columns(parse_one(sql), schema=self.schema)
+                    expression = optimizer.qualify_columns.qualify_columns(
+                        parse_one(sql), schema=self.schema
+                    )
+                    optimizer.qualify_columns.validate_qualify_columns(expression)

    def test_lower_identities(self):
        self.check_file("lower_identities", optimizer.lower_identities.lower_identities)

@@ -190,6 +193,14 @@ class TestOptimizer(unittest.TestCase):
    def test_pushdown_predicates(self):
        self.check_file("pushdown_predicates", optimizer.pushdown_predicates.pushdown_predicates)

+    def test_expand_laterals(self):
+        self.check_file(
+            "expand_laterals",
+            optimizer.expand_laterals.expand_laterals,
+            pretty=True,
+            execute=True,
+        )
+
    def test_expand_multi_table_selects(self):
        self.check_file(
            "expand_multi_table_selects",

@@ -369,6 +380,12 @@ FROM READ_CSV('tests/fixtures/optimizer/tpc-h/nation.csv.gz', 'delimiter', '|')
        self.assertEqual(expression.right.this.left.type.this, exp.DataType.Type.INT)
        self.assertEqual(expression.right.this.right.type.this, exp.DataType.Type.INT)

+    def test_lateral_annotation(self):
+        expression = optimizer.optimize(
+            parse_one("SELECT c FROM (select 1 a) as x LATERAL VIEW EXPLODE (a) AS c")
+        ).expressions[0]
+        self.assertEqual(expression.type.this, exp.DataType.Type.INT)
+
    def test_derived_tables_column_annotation(self):
        schema = {"x": {"cola": "INT"}, "y": {"cola": "FLOAT"}}
        sql = """