1
0
Fork 0

Adding upstream version 11.7.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 15:51:35 +01:00
parent b4e0e3422e
commit 82a8846a46
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
144 changed files with 44104 additions and 39367 deletions

View file

@ -22,7 +22,7 @@ class TestParser(unittest.TestCase):
{
"description": "Invalid expression / Unexpected token",
"line": 1,
"col": 1,
"col": 7,
"start_context": "",
"highlight": "SELECT",
"end_context": " 1;",
@ -40,7 +40,7 @@ class TestParser(unittest.TestCase):
{
"description": "Invalid expression / Unexpected token",
"line": 1,
"col": 1,
"col": 7,
"start_context": "",
"highlight": "SELECT",
"end_context": " 1;",
@ -49,7 +49,7 @@ class TestParser(unittest.TestCase):
{
"description": "Invalid expression / Unexpected token",
"line": 1,
"col": 1,
"col": 7,
"start_context": "",
"highlight": "SELECT",
"end_context": " 1;",
@ -112,6 +112,8 @@ class TestParser(unittest.TestCase):
self.assertIsInstance(lambda_expr.this.this, exp.Dot)
self.assertEqual(lambda_expr.sql(), "x -> x.id = id")
self.assertIsNone(parse_one("FILTER([], x -> x)").find(exp.Column))
def test_transactions(self):
expression = parse_one("BEGIN TRANSACTION")
self.assertIsNone(expression.this)
@ -222,6 +224,7 @@ class TestParser(unittest.TestCase):
self.assertEqual(parse_one("SELECT @x, @@x, @1").sql(), "SELECT @x, @@x, @1")
def test_var(self):
self.assertIsInstance(parse_one("INTERVAL '1' DAY").args["unit"], exp.Var)
self.assertEqual(parse_one("SELECT @JOIN, @'foo'").sql(), "SELECT @JOIN, @'foo'")
def test_comments(self):
@ -374,3 +377,96 @@ class TestParser(unittest.TestCase):
parse_one("ALTER TABLE foo RENAME TO bar").sql(),
"ALTER TABLE foo RENAME TO bar",
)
def test_pivot_columns(self):
    """Verify the column names synthesized for PIVOT queries, per dialect.

    Each fixture query pivots `part` rows; the expected output column names
    depend on (a) whether the aggregate(s) are aliased, (b) whether the IN
    values are aliased, and (c) the dialect's naming scheme for combining
    the two.
    """
    # No aliases anywhere: columns are just the pivoted IN values.
    no_aliases = """
        SELECT * FROM (
          SELECT partname, price FROM part
        ) PIVOT (AVG(price) FOR partname IN ('prop', 'rudder'))
    """
    # Both the aggregate and every IN value carry an alias.
    all_aliased = """
        SELECT * FROM (
          SELECT partname, price FROM part
        ) PIVOT (AVG(price) AS avg_price FOR partname IN ('prop' AS prop1, 'rudder' AS rudder1))
    """
    # Only the IN values are aliased; the aggregate is not.
    in_values_aliased = """
        SELECT * FROM (
          SELECT partname, price FROM part
        ) PIVOT (AVG(price) FOR partname IN ('prop' AS prop1, 'rudder' AS rudder1))
    """
    # Mixed: one IN value aliased, the other left bare.
    partially_aliased = """
        SELECT * FROM (
          SELECT partname, price FROM part
        ) PIVOT (AVG(price) FOR partname IN ('prop' AS prop1, 'rudder'))
    """
    # Two aliased aggregates cross-multiplied with the IN values.
    two_aggs_aliased = """
        SELECT * FROM (
          SELECT partname, price, quality FROM part
        ) PIVOT (AVG(price) AS p, MAX(quality) AS q FOR partname IN ('prop' AS prop1, 'rudder'))
    """
    # Two unaliased aggregates: the aggregate SQL itself becomes part of the name.
    two_aggs_unaliased = """
        SELECT * FROM (
          SELECT partname, price, quality FROM part
        ) PIVOT (AVG(price), MAX(quality) FOR partname IN ('prop' AS prop1, 'rudder'))
    """
    # Same, but with a quoted (case-preserving) identifier inside an aggregate.
    two_aggs_unaliased_quoted = """
        SELECT * FROM (
          SELECT partname, price, quality FROM part
        ) PIVOT (AVG(`PrIcE`), MAX(quality) FOR partname IN ('prop' AS prop1, 'rudder'))
    """

    # query -> {dialect -> expected rendered column names}
    expected_by_query = {
        no_aliases: {
            "bigquery": ["prop", "rudder"],
            "redshift": ["prop", "rudder"],
            "snowflake": ['"prop"', '"rudder"'],
            "spark": ["prop", "rudder"],
        },
        all_aliased: {
            "bigquery": ["avg_price_prop1", "avg_price_rudder1"],
            "redshift": ["prop1_avg_price", "rudder1_avg_price"],
            "spark": ["prop1", "rudder1"],
        },
        in_values_aliased: {
            "bigquery": ["prop1", "rudder1"],
            "redshift": ["prop1", "rudder1"],
            "spark": ["prop1", "rudder1"],
        },
        partially_aliased: {
            "bigquery": ["prop1", "rudder"],
            "redshift": ["prop1", "rudder"],
            "spark": ["prop1", "rudder"],
        },
        two_aggs_aliased: {
            "bigquery": ["p_prop1", "q_prop1", "p_rudder", "q_rudder"],
            "spark": ["prop1_p", "prop1_q", "rudder_p", "rudder_q"],
        },
        two_aggs_unaliased: {
            "spark": [
                "`prop1_avg(price)`",
                "`prop1_max(quality)`",
                "`rudder_avg(price)`",
                "`rudder_max(quality)`",
            ],
        },
        two_aggs_unaliased_quoted: {
            "spark": [
                "`prop1_avg(PrIcE)`",
                "`prop1_max(quality)`",
                "`rudder_avg(PrIcE)`",
                "`rudder_max(quality)`",
            ],
        },
    }

    for sql, per_dialect in expected_by_query.items():
        for dialect, want in per_dialect.items():
            tree = parse_one(sql, read=dialect)
            # The pivot hangs off the single FROM expression; its synthesized
            # output columns live under args["columns"].
            pivot = tree.args["from"].expressions[0].args["pivots"][0]
            rendered = [col.sql(dialect=dialect) for col in pivot.args["columns"]]
            self.assertEqual(want, rendered)