1
0
Fork 0

Merging upstream version 26.12.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-03-31 15:54:56 +02:00
parent d24d19e9ea
commit 69b6dd9501
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
70 changed files with 1134 additions and 340 deletions

View file

@@ -15,6 +15,7 @@ from sqlglot import (
from sqlglot.helper import logger as helper_logger
from sqlglot.parser import logger as parser_logger
from tests.dialects.test_dialect import Validator
from sqlglot.optimizer.annotate_types import annotate_types
class TestBigQuery(Validator):
@@ -196,6 +197,9 @@ LANGUAGE js AS
self.validate_identity("CAST(x AS TIMESTAMPTZ)", "CAST(x AS TIMESTAMP)")
self.validate_identity("CAST(x AS RECORD)", "CAST(x AS STRUCT)")
self.validate_identity("SELECT * FROM x WHERE x.y >= (SELECT MAX(a) FROM b-c) - 20")
self.validate_identity(
"SELECT FORMAT_TIMESTAMP('%Y-%m-%d %H:%M:%S', CURRENT_TIMESTAMP(), 'Europe/Berlin') AS ts"
)
self.validate_identity(
"SELECT cars, apples FROM some_table PIVOT(SUM(total_counts) FOR products IN ('general.cars' AS cars, 'food.apples' AS apples))"
)
@@ -317,6 +321,13 @@ LANGUAGE js AS
"SELECT CAST(1 AS INT64)",
)
self.validate_all(
"SELECT DATE_SUB(CURRENT_DATE(), INTERVAL 2 DAY)",
write={
"bigquery": "SELECT DATE_SUB(CURRENT_DATE, INTERVAL '2' DAY)",
"databricks": "SELECT DATE_ADD(CURRENT_DATE, -2)",
},
)
self.validate_all(
"SELECT DATE_SUB(DATE '2008-12-25', INTERVAL 5 DAY)",
write={
@@ -1309,8 +1320,8 @@ LANGUAGE js AS
"mysql": "DATE_ADD(CURRENT_DATE, INTERVAL '-1' DAY)",
"postgres": "CURRENT_DATE + INTERVAL '-1 DAY'",
"presto": "DATE_ADD('DAY', CAST('-1' AS BIGINT), CURRENT_DATE)",
"hive": "DATE_ADD(CURRENT_DATE, '-1')",
"spark": "DATE_ADD(CURRENT_DATE, '-1')",
"hive": "DATE_ADD(CURRENT_DATE, -1)",
"spark": "DATE_ADD(CURRENT_DATE, -1)",
},
)
self.validate_all(
@@ -2356,3 +2367,18 @@ OPTIONS (
"STRING_AGG(DISTINCT a ORDER BY b DESC, c DESC LIMIT 10)",
"STRING_AGG(DISTINCT a, ',' ORDER BY b DESC, c DESC LIMIT 10)",
)
def test_annotate_timestamps(self):
    # Check that every BigQuery timestamp-producing expression in the
    # projection list is annotated with the TIMESTAMP type by the
    # optimizer's type annotator.
    sql = """
SELECT
CURRENT_TIMESTAMP() AS curr_ts,
TIMESTAMP_SECONDS(2) AS ts_seconds,
PARSE_TIMESTAMP('%c', 'Thu Dec 25 07:30:00 2008', 'UTC') AS parsed_ts,
TIMESTAMP_ADD(TIMESTAMP "2008-12-25 15:30:00+00", INTERVAL 10 MINUTE) AS ts_add,
TIMESTAMP_SUB(TIMESTAMP "2008-12-25 15:30:00+00", INTERVAL 10 MINUTE) AS ts_sub,
"""
    annotated = annotate_types(self.parse_one(sql), dialect="bigquery")
    # Every projection must resolve to TIMESTAMP when rendered back
    # in the BigQuery dialect.
    for select in annotated.selects:
        self.assertEqual(select.type.sql("bigquery"), "TIMESTAMP")

View file

@ -50,6 +50,7 @@ class TestDatabricks(Validator):
self.validate_identity(
"COPY INTO target FROM `s3://link` FILEFORMAT = AVRO VALIDATE = ALL FILES = ('file1', 'file2') FORMAT_OPTIONS ('opt1'='true', 'opt2'='test') COPY_OPTIONS ('mergeSchema'='true')"
)
self.validate_identity("SELECT PARSE_JSON('{}')")
self.validate_identity(
"SELECT DATE_FORMAT(CAST(FROM_UTC_TIMESTAMP(foo, 'America/Los_Angeles') AS TIMESTAMP), 'yyyy-MM-dd HH:mm:ss') AS foo FROM t",
"SELECT DATE_FORMAT(CAST(FROM_UTC_TIMESTAMP(CAST(foo AS TIMESTAMP), 'America/Los_Angeles') AS TIMESTAMP), 'yyyy-MM-dd HH:mm:ss') AS foo FROM t",

View file

@ -1569,3 +1569,29 @@ class TestDuckDB(Validator):
""",
"SELECT l_returnflag, l_linestatus, SUM(l_quantity) AS sum_qty, SUM(l_extendedprice) AS sum_base_price, SUM(l_extendedprice * (1 - l_discount)) AS sum_disc_price, SUM(l_extendedprice * (1 - l_discount) * (1 + l_tax)) AS sum_charge, AVG(l_quantity) AS avg_qty, AVG(l_extendedprice) AS avg_price, AVG(l_discount) AS avg_disc, COUNT(*) AS count_order",
)
def test_at_sign_to_abs(self):
    # DuckDB's prefix `@` operator is sugar for ABS(); each input must
    # round-trip to the explicit ABS() spelling shown next to it.
    cases = (
        ("SELECT @col FROM t", "SELECT ABS(col) FROM t"),
        ("SELECT @col + 1 FROM t", "SELECT ABS(col + 1) FROM t"),
        ("SELECT (@col) + 1 FROM t", "SELECT (ABS(col)) + 1 FROM t"),
        ("SELECT @(-1)", "SELECT ABS((-1))"),
        ("SELECT @(-1) + 1", "SELECT ABS((-1) + 1)"),
        ("SELECT (@-1) + 1", "SELECT (ABS(-1)) + 1"),
    )
    for input_sql, expected_sql in cases:
        self.validate_identity(input_sql, expected_sql)

View file

@ -394,7 +394,7 @@ class TestSnowflake(Validator):
"""SELECT PARSE_JSON('{"fruit":"banana"}'):fruit""",
write={
"bigquery": """SELECT JSON_EXTRACT(PARSE_JSON('{"fruit":"banana"}'), '$.fruit')""",
"databricks": """SELECT '{"fruit":"banana"}':fruit""",
"databricks": """SELECT PARSE_JSON('{"fruit":"banana"}'):fruit""",
"duckdb": """SELECT JSON('{"fruit":"banana"}') -> '$.fruit'""",
"mysql": """SELECT JSON_EXTRACT('{"fruit":"banana"}', '$.fruit')""",
"presto": """SELECT JSON_EXTRACT(JSON_PARSE('{"fruit":"banana"}'), '$.fruit')""",
@ -1057,6 +1057,9 @@ class TestSnowflake(Validator):
staged_file.sql(dialect="snowflake"),
)
self.validate_identity('SELECT * FROM @"mystage"')
self.validate_identity('SELECT * FROM @"myschema"."mystage"/file.gz')
self.validate_identity('SELECT * FROM @"my_DB"."schEMA1".mystage/file.gz')
self.validate_identity("SELECT metadata$filename FROM @s1/")
self.validate_identity("SELECT * FROM @~")
self.validate_identity("SELECT * FROM @~/some/path/to/file.csv")
@ -1463,6 +1466,7 @@ class TestSnowflake(Validator):
"CREATE TABLE t (id INT TAG (key1='value_1', key2='value_2'))",
)
self.validate_identity("CREATE OR REPLACE TABLE foo COPY GRANTS USING TEMPLATE (SELECT 1)")
self.validate_identity("USE SECONDARY ROLES ALL")
self.validate_identity("USE SECONDARY ROLES NONE")
self.validate_identity("USE SECONDARY ROLES a, b, c")
@ -2386,11 +2390,14 @@ SINGLE = TRUE""",
)
def test_put_to_stage(self):
self.validate_identity('PUT \'file:///dir/tmp.csv\' @"my_DB"."schEMA1"."MYstage"')
# PUT with file path and stage ref containing spaces (wrapped in single quotes)
ast = parse_one("PUT 'file://my file.txt' '@s1/my folder'", read="snowflake")
self.assertIsInstance(ast, exp.Put)
self.assertEqual(ast.this, exp.Literal(this="file://my file.txt", is_string=True))
self.assertEqual(ast.args["target"], exp.Var(this="@s1/my folder"))
self.assertEqual(ast.args["target"], exp.Var(this="'@s1/my folder'"))
self.assertEqual(ast.sql("snowflake"), "PUT 'file://my file.txt' '@s1/my folder'")
# expression with additional properties
ast = parse_one(

View file

@ -322,6 +322,13 @@ TBLPROPERTIES (
},
)
self.validate_all(
"SELECT id_column, name, age FROM test_table LATERAL VIEW INLINE(struc_column) explode_view AS name, age",
write={
"presto": "SELECT id_column, name, age FROM test_table CROSS JOIN UNNEST(struc_column) AS explode_view(name, age)",
"spark": "SELECT id_column, name, age FROM test_table LATERAL VIEW INLINE(struc_column) explode_view AS name, age",
},
)
self.validate_all(
"SELECT ARRAY_AGG(x) FILTER (WHERE x = 5) FROM (SELECT 1 UNION ALL SELECT NULL) AS t(x)",
write={
@ -843,7 +850,7 @@ TBLPROPERTIES (
},
)
def test_explode_to_unnest(self):
def test_explode_projection_to_unnest(self):
self.validate_all(
"SELECT EXPLODE(x) FROM tbl",
write={
@ -951,3 +958,42 @@ TBLPROPERTIES (
self.validate_identity(
"ANALYZE TABLE ctlg.db.tbl PARTITION(foo = 'foo', bar = 'bar') COMPUTE STATISTICS NOSCAN"
)
def test_transpile_annotated_exploded_column(self):
    # Regression test: columns projected out of a LATERAL VIEW EXPLODE
    # alias must survive qualification + type annotation, and still
    # transpile correctly to Presto's CROSS JOIN UNNEST form.
    from sqlglot.optimizer.annotate_types import annotate_types
    from sqlglot.optimizer.qualify import qualify

    # The exploded column may be referenced bare or prefixed with the
    # lateral view alias; both spellings must behave identically.
    for db_prefix in ("", "explode_view."):
        with self.subTest(f"Annotated exploded column with prefix: {db_prefix}."):
            sql = f"""
WITH test_table AS (
SELECT
12345 AS id_column,
ARRAY(
STRUCT('John' AS name, 30 AS age),
STRUCT('Mary' AS name, 20 AS age),
STRUCT('Mike' AS name, 80 AS age),
STRUCT('Dan' AS name, 50 AS age)
) AS struct_column
)
SELECT
id_column,
{db_prefix}new_column.name,
{db_prefix}new_column.age
FROM test_table
LATERAL VIEW EXPLODE(struct_column) explode_view AS new_column
"""
            expr = self.parse_one(sql)
            qualified = qualify(expr, dialect="spark")
            annotated = annotate_types(qualified, dialect="spark")
            # Spark round-trip keeps the LATERAL VIEW EXPLODE shape.
            self.assertEqual(
                annotated.sql("spark"),
                "WITH `test_table` AS (SELECT 12345 AS `id_column`, ARRAY(STRUCT('John' AS `name`, 30 AS `age`), STRUCT('Mary' AS `name`, 20 AS `age`), STRUCT('Mike' AS `name`, 80 AS `age`), STRUCT('Dan' AS `name`, 50 AS `age`)) AS `struct_column`) SELECT `test_table`.`id_column` AS `id_column`, `explode_view`.`new_column`.`name` AS `name`, `explode_view`.`new_column`.`age` AS `age` FROM `test_table` AS `test_table` LATERAL VIEW EXPLODE(`test_table`.`struct_column`) explode_view AS `new_column`",
            )
            # Presto transpilation rewrites to CROSS JOIN UNNEST and
            # casts the STRUCT literals to typed ROWs.
            self.assertEqual(
                annotated.sql("presto"),
                """WITH "test_table" AS (SELECT 12345 AS "id_column", ARRAY[CAST(ROW('John', 30) AS ROW("name" VARCHAR, "age" INTEGER)), CAST(ROW('Mary', 20) AS ROW("name" VARCHAR, "age" INTEGER)), CAST(ROW('Mike', 80) AS ROW("name" VARCHAR, "age" INTEGER)), CAST(ROW('Dan', 50) AS ROW("name" VARCHAR, "age" INTEGER))] AS "struct_column") SELECT "test_table"."id_column" AS "id_column", "explode_view"."name" AS "name", "explode_view"."age" AS "age" FROM "test_table" AS "test_table" CROSS JOIN UNNEST("test_table"."struct_column") AS "explode_view"("name", "age")""",
            )

View file

@ -133,12 +133,25 @@ class TestTSQL(Validator):
},
)
self.validate_all(
"WITH t(c) AS (SELECT 1) SELECT * INTO TEMP UNLOGGED foo FROM (SELECT c AS c FROM t) AS temp",
"WITH t(c) AS (SELECT 1) SELECT * INTO UNLOGGED #foo FROM (SELECT c AS c FROM t) AS temp",
write={
"duckdb": "CREATE TEMPORARY TABLE foo AS WITH t(c) AS (SELECT 1) SELECT * FROM (SELECT c AS c FROM t) AS temp",
"postgres": "WITH t(c) AS (SELECT 1) SELECT * INTO TEMPORARY foo FROM (SELECT c AS c FROM t) AS temp",
},
)
self.validate_all(
"WITH t(c) AS (SELECT 1) SELECT c INTO #foo FROM t",
read={
"tsql": "WITH t(c) AS (SELECT 1) SELECT c INTO #foo FROM t",
"postgres": "WITH t(c) AS (SELECT 1) SELECT c INTO TEMPORARY foo FROM t",
},
write={
"tsql": "WITH t(c) AS (SELECT 1) SELECT c INTO #foo FROM t",
"postgres": "WITH t(c) AS (SELECT 1) SELECT c INTO TEMPORARY foo FROM t",
"duckdb": "CREATE TEMPORARY TABLE foo AS WITH t(c) AS (SELECT 1) SELECT c FROM t",
"snowflake": "CREATE TEMPORARY TABLE foo AS WITH t(c) AS (SELECT 1) SELECT c FROM t",
},
)
self.validate_all(
"WITH t(c) AS (SELECT 1) SELECT * INTO UNLOGGED foo FROM (SELECT c AS c FROM t) AS temp",
write={
@ -151,6 +164,13 @@ class TestTSQL(Validator):
"duckdb": "CREATE TABLE foo AS WITH t(c) AS (SELECT 1) SELECT * FROM (SELECT c AS c FROM t) AS temp",
},
)
self.validate_all(
"WITH y AS (SELECT 2 AS c) INSERT INTO #t SELECT * FROM y",
write={
"duckdb": "WITH y AS (SELECT 2 AS c) INSERT INTO t SELECT * FROM y",
"postgres": "WITH y AS (SELECT 2 AS c) INSERT INTO t SELECT * FROM y",
},
)
self.validate_all(
"WITH y AS (SELECT 2 AS c) INSERT INTO t SELECT * FROM y",
read={
@ -850,6 +870,9 @@ class TestTSQL(Validator):
)
def test_ddl(self):
for colstore in ("NONCLUSTERED COLUMNSTORE", "CLUSTERED COLUMNSTORE"):
self.validate_identity(f"CREATE {colstore} INDEX index_name ON foo.bar")
for view_attr in ("ENCRYPTION", "SCHEMABINDING", "VIEW_METADATA"):
self.validate_identity(f"CREATE VIEW a.b WITH {view_attr} AS SELECT * FROM x")
@ -871,19 +894,19 @@ class TestTSQL(Validator):
self.validate_identity("CREATE SCHEMA testSchema")
self.validate_identity("CREATE VIEW t AS WITH cte AS (SELECT 1 AS c) SELECT c FROM cte")
self.validate_identity("ALTER TABLE tbl SET SYSTEM_VERSIONING=OFF")
self.validate_identity("ALTER TABLE tbl SET FILESTREAM_ON = 'test'")
self.validate_identity("ALTER TABLE tbl SET DATA_DELETION=ON")
self.validate_identity("ALTER TABLE tbl SET DATA_DELETION=OFF")
self.validate_identity(
"ALTER TABLE tbl SET SYSTEM_VERSIONING=ON(HISTORY_TABLE=db.tbl, DATA_CONSISTENCY_CHECK=OFF, HISTORY_RETENTION_PERIOD=5 DAYS)"
)
self.validate_identity(
"ALTER TABLE tbl SET SYSTEM_VERSIONING=ON(HISTORY_TABLE=db.tbl, HISTORY_RETENTION_PERIOD=INFINITE)"
)
self.validate_identity("ALTER TABLE tbl SET SYSTEM_VERSIONING=OFF")
self.validate_identity("ALTER TABLE tbl SET FILESTREAM_ON = 'test'")
self.validate_identity(
"ALTER TABLE tbl SET DATA_DELETION=ON(FILTER_COLUMN=col, RETENTION_PERIOD=5 MONTHS)"
)
self.validate_identity("ALTER TABLE tbl SET DATA_DELETION=ON")
self.validate_identity("ALTER TABLE tbl SET DATA_DELETION=OFF")
self.validate_identity("ALTER VIEW v AS SELECT a, b, c, d FROM foo")
self.validate_identity("ALTER VIEW v AS SELECT * FROM foo WHERE c > 100")
@ -899,10 +922,44 @@ class TestTSQL(Validator):
"ALTER VIEW v WITH VIEW_METADATA AS SELECT * FROM foo WHERE c > 100",
check_command_warning=True,
)
self.validate_identity(
"CREATE COLUMNSTORE INDEX index_name ON foo.bar",
"CREATE NONCLUSTERED COLUMNSTORE INDEX index_name ON foo.bar",
)
self.validate_identity(
"CREATE PROCEDURE foo AS BEGIN DELETE FROM bla WHERE foo < CURRENT_TIMESTAMP - 7 END",
"CREATE PROCEDURE foo AS BEGIN DELETE FROM bla WHERE foo < GETDATE() - 7 END",
)
self.validate_identity(
"INSERT INTO Production.UpdatedInventory SELECT ProductID, LocationID, NewQty, PreviousQty FROM (MERGE INTO Production.ProductInventory AS pi USING (SELECT ProductID, SUM(OrderQty) FROM Sales.SalesOrderDetail AS sod INNER JOIN Sales.SalesOrderHeader AS soh ON sod.SalesOrderID = soh.SalesOrderID AND soh.OrderDate BETWEEN '20030701' AND '20030731' GROUP BY ProductID) AS src(ProductID, OrderQty) ON pi.ProductID = src.ProductID WHEN MATCHED AND pi.Quantity - src.OrderQty >= 0 THEN UPDATE SET pi.Quantity = pi.Quantity - src.OrderQty WHEN MATCHED AND pi.Quantity - src.OrderQty <= 0 THEN DELETE OUTPUT $action, Inserted.ProductID, Inserted.LocationID, Inserted.Quantity AS NewQty, Deleted.Quantity AS PreviousQty) AS Changes(Action, ProductID, LocationID, NewQty, PreviousQty) WHERE Action = 'UPDATE'",
"""INSERT INTO Production.UpdatedInventory
SELECT
ProductID,
LocationID,
NewQty,
PreviousQty
FROM (
MERGE INTO Production.ProductInventory AS pi
USING (
SELECT
ProductID,
SUM(OrderQty)
FROM Sales.SalesOrderDetail AS sod
INNER JOIN Sales.SalesOrderHeader AS soh
ON sod.SalesOrderID = soh.SalesOrderID
AND soh.OrderDate BETWEEN '20030701' AND '20030731'
GROUP BY
ProductID
) AS src(ProductID, OrderQty)
ON pi.ProductID = src.ProductID
WHEN MATCHED AND pi.Quantity - src.OrderQty >= 0 THEN UPDATE SET pi.Quantity = pi.Quantity - src.OrderQty
WHEN MATCHED AND pi.Quantity - src.OrderQty <= 0 THEN DELETE
OUTPUT $action, Inserted.ProductID, Inserted.LocationID, Inserted.Quantity AS NewQty, Deleted.Quantity AS PreviousQty
) AS Changes(Action, ProductID, LocationID, NewQty, PreviousQty)
WHERE
Action = 'UPDATE'""",
pretty=True,
)
self.validate_all(
"CREATE TABLE [#temptest] (name INTEGER)",
@ -1003,14 +1060,6 @@ class TestTSQL(Validator):
},
)
for colstore in ("NONCLUSTERED COLUMNSTORE", "CLUSTERED COLUMNSTORE"):
self.validate_identity(f"CREATE {colstore} INDEX index_name ON foo.bar")
self.validate_identity(
"CREATE COLUMNSTORE INDEX index_name ON foo.bar",
"CREATE NONCLUSTERED COLUMNSTORE INDEX index_name ON foo.bar",
)
def test_insert_cte(self):
self.validate_all(
"INSERT INTO foo.bar WITH cte AS (SELECT 1 AS one) SELECT * FROM cte",

View file

@ -19,6 +19,15 @@ INT;
LEAST(1, 2.5, 3);
DOUBLE;
CURRENT_TIME();
TIME;
TIME_ADD(CAST('09:05:03' AS TIME), INTERVAL 2 HOUR);
TIME;
TIME_SUB(CAST('09:05:03' AS TIME), INTERVAL 2 HOUR);
TIME;
--------------------------------------
-- Spark2 / Spark3 / Databricks
--------------------------------------

View file

@ -259,7 +259,7 @@ FROM
t1;
WITH t1 AS (SELECT x.a AS a, x.b AS b, ROW_NUMBER() OVER (PARTITION BY x.a ORDER BY x.a) AS row_num FROM x AS x) SELECT SUM(t1.row_num) AS total_rows FROM t1 AS t1;
# title: Test prevent merging of window if in group by func
# title: Test prevent merging of window if in group by
with t1 as (
SELECT
x.a,
@ -277,7 +277,7 @@ GROUP BY t1.row_num
ORDER BY t1.row_num;
WITH t1 AS (SELECT x.a AS a, x.b AS b, ROW_NUMBER() OVER (PARTITION BY x.a ORDER BY x.a) AS row_num FROM x AS x) SELECT t1.row_num AS row_num, SUM(t1.a) AS total FROM t1 AS t1 GROUP BY t1.row_num ORDER BY row_num;
# title: Test prevent merging of window if in order by func
# title: Test prevent merging of window if in order by
with t1 as (
SELECT
x.a,
@ -294,6 +294,23 @@ FROM
ORDER BY t1.row_num, t1.a;
WITH t1 AS (SELECT x.a AS a, x.b AS b, ROW_NUMBER() OVER (PARTITION BY x.a ORDER BY x.a) AS row_num FROM x AS x) SELECT t1.row_num AS row_num, t1.a AS a FROM t1 AS t1 ORDER BY t1.row_num, t1.a;
# title: Test preventing merging of window nested under complex projection if in order by
WITH t1 AS (
SELECT
x.a,
x.b,
ROW_NUMBER() OVER (PARTITION BY x.a ORDER BY x.a) - 1 AS row_num
FROM
x
)
SELECT
t1.row_num AS row_num,
t1.a AS a
FROM
t1
ORDER BY t1.row_num, t1.a;
WITH t1 AS (SELECT x.a AS a, x.b AS b, ROW_NUMBER() OVER (PARTITION BY x.a ORDER BY x.a) - 1 AS row_num FROM x AS x) SELECT t1.row_num AS row_num, t1.a AS a FROM t1 AS t1 ORDER BY t1.row_num, t1.a;
# title: Test allow merging of window function
with t1 as (
SELECT

View file

@ -760,7 +760,10 @@ SELECT
`_q_0`.`first_half_sales` AS `first_half_sales`,
`_q_0`.`second_half_sales` AS `second_half_sales`
FROM `produce` AS `produce`
UNPIVOT((`first_half_sales`, `second_half_sales`) FOR `semesters` IN ((`produce`.`q1`, `produce`.`q2`) AS 'semester_1', (`produce`.`q3`, `produce`.`q4`) AS 'semester_2')) AS `_q_0`;
UNPIVOT((`first_half_sales`, `second_half_sales`) FOR `semesters` IN (
(`produce`.`q1`, `produce`.`q2`) AS 'semester_1',
(`produce`.`q3`, `produce`.`q4`) AS 'semester_2'
)) AS `_q_0`;
# title: quoting is preserved
# dialect: snowflake
@ -1382,7 +1385,13 @@ LEFT JOIN `_u_3` AS `_u_3`
ON `_u_3`.`_u_4` = `cs1`.`cs_order_number`
JOIN `call_center` AS `call_center`
ON `call_center`.`cc_call_center_sk` = `cs1`.`cs_call_center_sk`
AND `call_center`.`cc_county` IN ('Williamson County', 'Williamson County', 'Williamson County', 'Williamson County', 'Williamson County')
AND `call_center`.`cc_county` IN (
'Williamson County',
'Williamson County',
'Williamson County',
'Williamson County',
'Williamson County'
)
JOIN `customer_address` AS `customer_address`
ON `cs1`.`cs_ship_addr_sk` = `customer_address`.`ca_address_sk`
AND `customer_address`.`ca_state` = 'GA'

View file

@ -770,6 +770,12 @@ WITH RECURSIVE t AS (SELECT 1 AS c UNION ALL SELECT t.c + 1 AS c FROM t AS t WHE
SELECT DISTINCT ON (new_col, b + 1, 1) t1.a AS new_col FROM x AS t1 ORDER BY new_col;
SELECT DISTINCT ON (new_col, t1.b + 1, new_col) t1.a AS new_col FROM x AS t1 ORDER BY new_col;
# title: Oracle does not support lateral alias expansion
# dialect: oracle
# execute: false
SELECT a AS b, b AS a FROM c;
SELECT C.A AS B, C.B AS A FROM C C;
--------------------------------------
-- Wrapped tables / join constructs
--------------------------------------

View file

@ -1227,7 +1227,408 @@ WITH "a1" AS (
SUBSTRING("customer_address"."ca_zip", 1, 5) AS "ca_zip"
FROM "customer_address" AS "customer_address"
WHERE
SUBSTRING("customer_address"."ca_zip", 1, 5) IN ('67436', '26121', '38443', '63157', '68856', '19485', '86425', '26741', '70991', '60899', '63573', '47556', '56193', '93314', '87827', '62017', '85067', '95390', '48091', '10261', '81845', '41790', '42853', '24675', '12840', '60065', '84430', '57451', '24021', '91735', '75335', '71935', '34482', '56943', '70695', '52147', '56251', '28411', '86653', '23005', '22478', '29031', '34398', '15365', '42460', '33337', '59433', '73943', '72477', '74081', '74430', '64605', '39006', '11226', '49057', '97308', '42663', '18187', '19768', '43454', '32147', '76637', '51975', '11181', '45630', '33129', '45995', '64386', '55522', '26697', '20963', '35154', '64587', '49752', '66386', '30586', '59286', '13177', '66646', '84195', '74316', '36853', '32927', '12469', '11904', '36269', '17724', '55346', '12595', '53988', '65439', '28015', '63268', '73590', '29216', '82575', '69267', '13805', '91678', '79460', '94152', '14961', '15419', '48277', '62588', '55493', '28360', '14152', '55225', '18007', '53705', '56573', '80245', '71769', '57348', '36845', '13039', '17270', '22363', '83474', '25294', '43269', '77666', '15488', '99146', '64441', '43338', '38736', '62754', '48556', '86057', '23090', '38114', '66061', '18910', '84385', '23600', '19975', '27883', '65719', '19933', '32085', '49731', '40473', '27190', '46192', '23949', '44738', '12436', '64794', '68741', '15333', '24282', '49085', '31844', '71156', '48441', '17100', '98207', '44982', '20277', '71496', '96299', '37583', '22206', '89174', '30589', '61924', '53079', '10976', '13104', '42794', '54772', '15809', '56434', '39975', '13874', '30753', '77598', '78229', '59478', '12345', '55547', '57422', '42600', '79444', '29074', '29752', '21676', '32096', '43044', '39383', '37296', '36295', '63077', '16572', '31275', '18701', '40197', '48242', '27219', '49865', '84175', '30446', '25165', '13807', '72142', '70499', '70464', '71429', '18111', '70857', '29545', '36425', '52706', '36194', 
'42963', '75068', '47921', '74763', '90990', '89456', '62073', '88397', '73963', '75885', '62657', '12530', '81146', '57434', '25099', '41429', '98441', '48713', '52552', '31667', '14072', '13903', '44709', '85429', '58017', '38295', '44875', '73541', '30091', '12707', '23762', '62258', '33247', '78722', '77431', '14510', '35656', '72428', '92082', '35267', '43759', '24354', '90952', '11512', '21242', '22579', '56114', '32339', '52282', '41791', '24484', '95020', '28408', '99710', '11899', '43344', '72915', '27644', '62708', '74479', '17177', '32619', '12351', '91339', '31169', '57081', '53522', '16712', '34419', '71779', '44187', '46206', '96099', '61910', '53664', '12295', '31837', '33096', '10813', '63048', '31732', '79118', '73084', '72783', '84952', '46965', '77956', '39815', '32311', '75329', '48156', '30826', '49661', '13736', '92076', '74865', '88149', '92397', '52777', '68453', '32012', '21222', '52721', '24626', '18210', '42177', '91791', '75251', '82075', '44372', '45542', '20609', '60115', '17362', '22750', '90434', '31852', '54071', '33762', '14705', '40718', '56433', '30996', '40657', '49056', '23585', '66455', '41021', '74736', '72151', '37007', '21729', '60177', '84558', '59027', '93855', '60022', '86443', '19541', '86886', '30532', '39062', '48532', '34713', '52077', '22564', '64638', '15273', '31677', '36138', '62367', '60261', '80213', '42818', '25113', '72378', '69802', '69096', '55443', '28820', '13848', '78258', '37490', '30556', '77380', '28447', '44550', '26791', '70609', '82182', '33306', '43224', '22322', '86959', '68519', '14308', '46501', '81131', '34056', '61991', '19896', '87804', '65774', '92564')
SUBSTRING("customer_address"."ca_zip", 1, 5) IN (
'67436',
'26121',
'38443',
'63157',
'68856',
'19485',
'86425',
'26741',
'70991',
'60899',
'63573',
'47556',
'56193',
'93314',
'87827',
'62017',
'85067',
'95390',
'48091',
'10261',
'81845',
'41790',
'42853',
'24675',
'12840',
'60065',
'84430',
'57451',
'24021',
'91735',
'75335',
'71935',
'34482',
'56943',
'70695',
'52147',
'56251',
'28411',
'86653',
'23005',
'22478',
'29031',
'34398',
'15365',
'42460',
'33337',
'59433',
'73943',
'72477',
'74081',
'74430',
'64605',
'39006',
'11226',
'49057',
'97308',
'42663',
'18187',
'19768',
'43454',
'32147',
'76637',
'51975',
'11181',
'45630',
'33129',
'45995',
'64386',
'55522',
'26697',
'20963',
'35154',
'64587',
'49752',
'66386',
'30586',
'59286',
'13177',
'66646',
'84195',
'74316',
'36853',
'32927',
'12469',
'11904',
'36269',
'17724',
'55346',
'12595',
'53988',
'65439',
'28015',
'63268',
'73590',
'29216',
'82575',
'69267',
'13805',
'91678',
'79460',
'94152',
'14961',
'15419',
'48277',
'62588',
'55493',
'28360',
'14152',
'55225',
'18007',
'53705',
'56573',
'80245',
'71769',
'57348',
'36845',
'13039',
'17270',
'22363',
'83474',
'25294',
'43269',
'77666',
'15488',
'99146',
'64441',
'43338',
'38736',
'62754',
'48556',
'86057',
'23090',
'38114',
'66061',
'18910',
'84385',
'23600',
'19975',
'27883',
'65719',
'19933',
'32085',
'49731',
'40473',
'27190',
'46192',
'23949',
'44738',
'12436',
'64794',
'68741',
'15333',
'24282',
'49085',
'31844',
'71156',
'48441',
'17100',
'98207',
'44982',
'20277',
'71496',
'96299',
'37583',
'22206',
'89174',
'30589',
'61924',
'53079',
'10976',
'13104',
'42794',
'54772',
'15809',
'56434',
'39975',
'13874',
'30753',
'77598',
'78229',
'59478',
'12345',
'55547',
'57422',
'42600',
'79444',
'29074',
'29752',
'21676',
'32096',
'43044',
'39383',
'37296',
'36295',
'63077',
'16572',
'31275',
'18701',
'40197',
'48242',
'27219',
'49865',
'84175',
'30446',
'25165',
'13807',
'72142',
'70499',
'70464',
'71429',
'18111',
'70857',
'29545',
'36425',
'52706',
'36194',
'42963',
'75068',
'47921',
'74763',
'90990',
'89456',
'62073',
'88397',
'73963',
'75885',
'62657',
'12530',
'81146',
'57434',
'25099',
'41429',
'98441',
'48713',
'52552',
'31667',
'14072',
'13903',
'44709',
'85429',
'58017',
'38295',
'44875',
'73541',
'30091',
'12707',
'23762',
'62258',
'33247',
'78722',
'77431',
'14510',
'35656',
'72428',
'92082',
'35267',
'43759',
'24354',
'90952',
'11512',
'21242',
'22579',
'56114',
'32339',
'52282',
'41791',
'24484',
'95020',
'28408',
'99710',
'11899',
'43344',
'72915',
'27644',
'62708',
'74479',
'17177',
'32619',
'12351',
'91339',
'31169',
'57081',
'53522',
'16712',
'34419',
'71779',
'44187',
'46206',
'96099',
'61910',
'53664',
'12295',
'31837',
'33096',
'10813',
'63048',
'31732',
'79118',
'73084',
'72783',
'84952',
'46965',
'77956',
'39815',
'32311',
'75329',
'48156',
'30826',
'49661',
'13736',
'92076',
'74865',
'88149',
'92397',
'52777',
'68453',
'32012',
'21222',
'52721',
'24626',
'18210',
'42177',
'91791',
'75251',
'82075',
'44372',
'45542',
'20609',
'60115',
'17362',
'22750',
'90434',
'31852',
'54071',
'33762',
'14705',
'40718',
'56433',
'30996',
'40657',
'49056',
'23585',
'66455',
'41021',
'74736',
'72151',
'37007',
'21729',
'60177',
'84558',
'59027',
'93855',
'60022',
'86443',
'19541',
'86886',
'30532',
'39062',
'48532',
'34713',
'52077',
'22564',
'64638',
'15273',
'31677',
'36138',
'62367',
'60261',
'80213',
'42818',
'25113',
'72378',
'69802',
'69096',
'55443',
'28820',
'13848',
'78258',
'37490',
'30556',
'77380',
'28447',
'44550',
'26791',
'70609',
'82182',
'33306',
'43224',
'22322',
'86959',
'68519',
'14308',
'46501',
'81131',
'34056',
'61991',
'19896',
'87804',
'65774',
'92564'
)
INTERSECT
SELECT
"a1"."ca_zip" AS "ca_zip"
@ -1580,7 +1981,13 @@ LEFT JOIN "_u_4" AS "_u_4"
ON "_u_4"."_u_5" = "c"."c_customer_sk"
JOIN "customer_address" AS "ca"
ON "c"."c_current_addr_sk" = "ca"."ca_address_sk"
AND "ca"."ca_county" IN ('Lycoming County', 'Sheridan County', 'Kandiyohi County', 'Pike County', 'Greene County')
AND "ca"."ca_county" IN (
'Lycoming County',
'Sheridan County',
'Kandiyohi County',
'Pike County',
'Greene County'
)
JOIN "customer_demographics" AS "customer_demographics"
ON "c"."c_current_cdemo_sk" = "customer_demographics"."cd_demo_sk"
WHERE
@ -2413,7 +2820,13 @@ LEFT JOIN "_u_3" AS "_u_3"
ON "_u_3"."_u_4" = "cs1"."cs_order_number"
JOIN "call_center" AS "call_center"
ON "call_center"."cc_call_center_sk" = "cs1"."cs_call_center_sk"
AND "call_center"."cc_county" IN ('Williamson County', 'Williamson County', 'Williamson County', 'Williamson County', 'Williamson County')
AND "call_center"."cc_county" IN (
'Williamson County',
'Williamson County',
'Williamson County',
'Williamson County',
'Williamson County'
)
JOIN "customer_address" AS "customer_address"
ON "cs1"."cs_ship_addr_sk" = "customer_address"."ca_address_sk"
AND "customer_address"."ca_state" = 'IA'
@ -4221,7 +4634,16 @@ WITH "dn" AS (
ELSE NULL
END > 1.2
JOIN "store" AS "store"
ON "store"."s_county" IN ('Williamson County', 'Williamson County', 'Williamson County', 'Williamson County', 'Williamson County', 'Williamson County', 'Williamson County', 'Williamson County')
ON "store"."s_county" IN (
'Williamson County',
'Williamson County',
'Williamson County',
'Williamson County',
'Williamson County',
'Williamson County',
'Williamson County',
'Williamson County'
)
AND "store"."s_store_sk" = "store_sales"."ss_store_sk"
GROUP BY
"store_sales"."ss_ticket_number",
@ -6339,7 +6761,12 @@ WITH "tmp1" AS (
WHERE
(
"item"."i_brand" IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')
OR "item"."i_brand" IN ('scholaramalgamalg #14', 'scholaramalgamalg #7', 'exportiunivamalg #9', 'scholaramalgamalg #9')
OR "item"."i_brand" IN (
'scholaramalgamalg #14',
'scholaramalgamalg #7',
'exportiunivamalg #9',
'scholaramalgamalg #9'
)
)
AND (
"item"."i_brand" IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')
@ -6350,11 +6777,21 @@ WITH "tmp1" AS (
OR "item"."i_class" IN ('personal', 'portable', 'reference', 'self-help')
)
AND (
"item"."i_brand" IN ('scholaramalgamalg #14', 'scholaramalgamalg #7', 'exportiunivamalg #9', 'scholaramalgamalg #9')
"item"."i_brand" IN (
'scholaramalgamalg #14',
'scholaramalgamalg #7',
'exportiunivamalg #9',
'scholaramalgamalg #9'
)
OR "item"."i_category" IN ('Women', 'Music', 'Men')
)
AND (
"item"."i_brand" IN ('scholaramalgamalg #14', 'scholaramalgamalg #7', 'exportiunivamalg #9', 'scholaramalgamalg #9')
"item"."i_brand" IN (
'scholaramalgamalg #14',
'scholaramalgamalg #7',
'exportiunivamalg #9',
'scholaramalgamalg #9'
)
OR "item"."i_class" IN ('accessories', 'classical', 'fragrances', 'pants')
)
AND (
@ -7755,7 +8192,12 @@ WITH "tmp1" AS (
WHERE
(
"item"."i_brand" IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')
OR "item"."i_brand" IN ('scholaramalgamalg #14', 'scholaramalgamalg #7', 'exportiunivamalg #9', 'scholaramalgamalg #9')
OR "item"."i_brand" IN (
'scholaramalgamalg #14',
'scholaramalgamalg #7',
'exportiunivamalg #9',
'scholaramalgamalg #9'
)
)
AND (
"item"."i_brand" IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')
@ -7766,11 +8208,21 @@ WITH "tmp1" AS (
OR "item"."i_class" IN ('personal', 'portable', 'reference', 'self-help')
)
AND (
"item"."i_brand" IN ('scholaramalgamalg #14', 'scholaramalgamalg #7', 'exportiunivamalg #9', 'scholaramalgamalg #9')
"item"."i_brand" IN (
'scholaramalgamalg #14',
'scholaramalgamalg #7',
'exportiunivamalg #9',
'scholaramalgamalg #9'
)
OR "item"."i_category" IN ('Women', 'Music', 'Men')
)
AND (
"item"."i_brand" IN ('scholaramalgamalg #14', 'scholaramalgamalg #7', 'exportiunivamalg #9', 'scholaramalgamalg #9')
"item"."i_brand" IN (
'scholaramalgamalg #14',
'scholaramalgamalg #7',
'exportiunivamalg #9',
'scholaramalgamalg #9'
)
OR "item"."i_class" IN ('accessories', 'classical', 'fragrances', 'pants')
)
AND (
@ -9677,7 +10129,12 @@ WITH "dj" AS (
ELSE NULL
END > 1
JOIN "store" AS "store"
ON "store"."s_county" IN ('Williamson County', 'Williamson County', 'Williamson County', 'Williamson County')
ON "store"."s_county" IN (
'Williamson County',
'Williamson County',
'Williamson County',
'Williamson County'
)
AND "store"."s_store_sk" = "store_sales"."ss_store_sk"
GROUP BY
"store_sales"."ss_ticket_number",

View file

@@ -9,6 +9,19 @@ from sqlglot import ParseError, alias, exp, parse_one
class TestExpressions(unittest.TestCase):
maxDiff = None
def test_to_s(self):
    # repr() of a parsed expression should match its canonical debug
    # representation, for literals, booleans and (quoted) columns alike.
    expected_reprs = (
        ("5", "Literal(this=5, is_string=False)"),
        ("5.3", "Literal(this=5.3, is_string=False)"),
        ("True", "Boolean(this=True)"),
        ("' x'", "Literal(this=' x', is_string=True)"),
        ("' \n x'", "Literal(this=' \\n x', is_string=True)"),
        (" x ", "Column(\n this=Identifier(this=x, quoted=False))"),
        ('" x "', "Column(\n this=Identifier(this=' x ', quoted=True))"),
    )
    for sql, expected in expected_reprs:
        self.assertEqual(repr(parse_one(sql)), expected)
def test_arg_key(self):
self.assertEqual(parse_one("sum(1)").find(exp.Literal).arg_key, "this")