1
0
Fork 0

Adding upstream version 15.0.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 15:56:32 +01:00
parent 70d5d3451a
commit bb75596aa9
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
167 changed files with 58268 additions and 51337 deletions

View file

@@ -15,14 +15,23 @@ class TestParser(unittest.TestCase):
self.assertIsInstance(parse_one("left join foo", into=exp.Join), exp.Join)
self.assertIsInstance(parse_one("int", into=exp.DataType), exp.DataType)
self.assertIsInstance(parse_one("array<int>", into=exp.DataType), exp.DataType)
self.assertIsInstance(parse_one("foo", into=exp.Table), exp.Table)
with self.assertRaises(ParseError) as ctx:
parse_one("SELECT * FROM tbl", into=exp.Table)
self.assertEqual(
str(ctx.exception),
"Failed to parse 'SELECT * FROM tbl' into <class 'sqlglot.expressions.Table'>",
)
def test_parse_into_error(self):
expected_message = "Failed to parse into [<class 'sqlglot.expressions.From'>]"
expected_message = "Failed to parse 'SELECT 1;' into [<class 'sqlglot.expressions.From'>]"
expected_errors = [
{
"description": "Invalid expression / Unexpected token",
"line": 1,
"col": 7,
"col": 6,
"start_context": "",
"highlight": "SELECT",
"end_context": " 1;",
@@ -30,17 +39,18 @@ class TestParser(unittest.TestCase):
}
]
with self.assertRaises(ParseError) as ctx:
parse_one("SELECT 1;", "sqlite", [exp.From])
parse_one("SELECT 1;", read="sqlite", into=[exp.From])
self.assertEqual(str(ctx.exception), expected_message)
self.assertEqual(ctx.exception.errors, expected_errors)
def test_parse_into_errors(self):
expected_message = "Failed to parse into [<class 'sqlglot.expressions.From'>, <class 'sqlglot.expressions.Join'>]"
expected_message = "Failed to parse 'SELECT 1;' into [<class 'sqlglot.expressions.From'>, <class 'sqlglot.expressions.Join'>]"
expected_errors = [
{
"description": "Invalid expression / Unexpected token",
"line": 1,
"col": 7,
"col": 6,
"start_context": "",
"highlight": "SELECT",
"end_context": " 1;",
@@ -49,7 +59,7 @@ class TestParser(unittest.TestCase):
{
"description": "Invalid expression / Unexpected token",
"line": 1,
"col": 7,
"col": 6,
"start_context": "",
"highlight": "SELECT",
"end_context": " 1;",
@@ -58,6 +68,7 @@ class TestParser(unittest.TestCase):
]
with self.assertRaises(ParseError) as ctx:
parse_one("SELECT 1;", "sqlite", [exp.From, exp.Join])
self.assertEqual(str(ctx.exception), expected_message)
self.assertEqual(ctx.exception.errors, expected_errors)
@@ -75,7 +86,7 @@ class TestParser(unittest.TestCase):
def test_table(self):
tables = [t.sql() for t in parse_one("select * from a, b.c, .d").find_all(exp.Table)]
self.assertEqual(tables, ["a", "b.c", "d"])
self.assertEqual(set(tables), {"a", "b.c", "d"})
def test_union_order(self):
self.assertIsInstance(parse_one("SELECT * FROM (SELECT 1) UNION SELECT 2"), exp.Union)
@@ -92,7 +103,7 @@ class TestParser(unittest.TestCase):
self.assertEqual(len(parse_one("select * from (select 1) x cross join y").args["joins"]), 1)
self.assertEqual(
parse_one("""SELECT * FROM x CROSS JOIN y, z LATERAL VIEW EXPLODE(y)""").sql(),
"""SELECT * FROM x, z CROSS JOIN y LATERAL VIEW EXPLODE(y)""",
"""SELECT * FROM x CROSS JOIN y, z LATERAL VIEW EXPLODE(y)""",
)
self.assertIsNone(
parse_one("create table a as (select b from c) index").find(exp.TableAlias)
@@ -156,8 +167,8 @@ class TestParser(unittest.TestCase):
assert expression.expressions[2].alias == "c"
assert expression.expressions[3].alias == "D"
assert expression.expressions[4].alias == "y|z'"
table = expression.args["from"].expressions[0]
assert table.this.name == "z"
table = expression.args["from"].this
assert table.name == "z"
assert table.args["db"].name == "y"
def test_multi(self):
@@ -168,8 +179,8 @@ class TestParser(unittest.TestCase):
)
assert len(expressions) == 2
assert expressions[0].args["from"].expressions[0].this.name == "a"
assert expressions[1].args["from"].expressions[0].this.name == "b"
assert expressions[0].args["from"].name == "a"
assert expressions[1].args["from"].name == "b"
expressions = parse("SELECT 1; ; SELECT 2")
@@ -202,6 +213,15 @@ class TestParser(unittest.TestCase):
with self.assertRaises(ParseError):
parse_one("WITH cte AS (SELECT * FROM x)")
self.assertEqual(
parse_one(
"CREATE TABLE t (i UInt8) ENGINE = AggregatingMergeTree() ORDER BY tuple()",
read="clickhouse",
error_level=ErrorLevel.RAISE,
).sql(dialect="clickhouse"),
"CREATE TABLE t (i UInt8) ENGINE=AggregatingMergeTree() ORDER BY tuple()",
)
def test_space(self):
self.assertEqual(
parse_one("SELECT ROW() OVER(PARTITION BY x) FROM x GROUP BY y").sql(),
@@ -292,6 +312,7 @@ class TestParser(unittest.TestCase):
self.assertIsInstance(parse_one("TIMESTAMP('2022-01-01')"), exp.Func)
self.assertIsInstance(parse_one("TIMESTAMP()"), exp.Func)
self.assertIsInstance(parse_one("map.x"), exp.Column)
self.assertIsInstance(parse_one("CAST(x AS CHAR(5))").to.expressions[0], exp.DataTypeSize)
def test_set_expression(self):
set_ = parse_one("SET")
@@ -415,39 +436,56 @@ class TestParser(unittest.TestCase):
) PIVOT (AVG(price), MAX(quality) FOR partname IN ('prop' AS prop1, 'rudder'))
"""
multiple_aggregates_not_aliased_with_quoted_identifier = """
multiple_aggregates_not_aliased_with_quoted_identifier_spark = """
SELECT * FROM (
SELECT partname, price, quality FROM part
) PIVOT (AVG(`PrIcE`), MAX(quality) FOR partname IN ('prop' AS prop1, 'rudder'))
"""
multiple_aggregates_not_aliased_with_quoted_identifier_duckdb = """
SELECT * FROM (
SELECT partname, price, quality FROM part
) PIVOT (AVG("PrIcE"), MAX(quality) FOR partname IN ('prop' AS prop1, 'rudder'))
"""
query_to_column_names = {
nothing_aliased: {
"bigquery": ["prop", "rudder"],
"duckdb": ["prop", "rudder"],
"redshift": ["prop", "rudder"],
"snowflake": ['"prop"', '"rudder"'],
"snowflake": ['''"'prop'"''', '''"'rudder'"'''],
"spark": ["prop", "rudder"],
},
everything_aliased: {
"bigquery": ["avg_price_prop1", "avg_price_rudder1"],
"duckdb": ["prop1_avg_price", "rudder1_avg_price"],
"redshift": ["prop1_avg_price", "rudder1_avg_price"],
"spark": ["prop1", "rudder1"],
},
only_pivot_columns_aliased: {
"bigquery": ["prop1", "rudder1"],
"duckdb": ["prop1", "rudder1"],
"redshift": ["prop1", "rudder1"],
"spark": ["prop1", "rudder1"],
},
columns_partially_aliased: {
"bigquery": ["prop1", "rudder"],
"duckdb": ["prop1", "rudder"],
"redshift": ["prop1", "rudder"],
"spark": ["prop1", "rudder"],
},
multiple_aggregates_aliased: {
"bigquery": ["p_prop1", "q_prop1", "p_rudder", "q_rudder"],
"duckdb": ["prop1_p", "prop1_q", "rudder_p", "rudder_q"],
"spark": ["prop1_p", "prop1_q", "rudder_p", "rudder_q"],
},
multiple_aggregates_not_aliased: {
"duckdb": [
'"prop1_avg(price)"',
'"prop1_max(quality)"',
'"rudder_avg(price)"',
'"rudder_max(quality)"',
],
"spark": [
"`prop1_avg(price)`",
"`prop1_max(quality)`",
@@ -455,7 +493,7 @@ class TestParser(unittest.TestCase):
"`rudder_max(quality)`",
],
},
multiple_aggregates_not_aliased_with_quoted_identifier: {
multiple_aggregates_not_aliased_with_quoted_identifier_spark: {
"spark": [
"`prop1_avg(PrIcE)`",
"`prop1_max(quality)`",
@@ -463,10 +501,23 @@ class TestParser(unittest.TestCase):
"`rudder_max(quality)`",
],
},
multiple_aggregates_not_aliased_with_quoted_identifier_duckdb: {
"duckdb": [
'"prop1_avg(PrIcE)"',
'"prop1_max(quality)"',
'"rudder_avg(PrIcE)"',
'"rudder_max(quality)"',
],
},
}
for query, dialect_columns in query_to_column_names.items():
for dialect, expected_columns in dialect_columns.items():
expr = parse_one(query, read=dialect)
columns = expr.args["from"].expressions[0].args["pivots"][0].args["columns"]
columns = expr.args["from"].this.args["pivots"][0].args["columns"]
self.assertEqual(expected_columns, [col.sql(dialect=dialect) for col in columns])
def test_parse_properties(self):
self.assertEqual(
parse_one("create materialized table x").sql(), "CREATE MATERIALIZED TABLE x"
)