
Adding upstream version 26.11.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-03-23 10:41:59 +01:00
parent 5b8e67f8b8
commit aa70b5e889
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
52 changed files with 14319 additions and 14244 deletions

@@ -196,6 +196,9 @@ LANGUAGE js AS
self.validate_identity("CAST(x AS TIMESTAMPTZ)", "CAST(x AS TIMESTAMP)")
self.validate_identity("CAST(x AS RECORD)", "CAST(x AS STRUCT)")
self.validate_identity("SELECT * FROM x WHERE x.y >= (SELECT MAX(a) FROM b-c) - 20")
self.validate_identity(
"SELECT cars, apples FROM some_table PIVOT(SUM(total_counts) FOR products IN ('general.cars' AS cars, 'food.apples' AS apples))"
)
self.validate_identity(
"MERGE INTO dataset.NewArrivals USING (SELECT * FROM UNNEST([('microwave', 10, 'warehouse #1'), ('dryer', 30, 'warehouse #1'), ('oven', 20, 'warehouse #2')])) ON FALSE WHEN NOT MATCHED THEN INSERT ROW WHEN NOT MATCHED BY SOURCE THEN DELETE"
)
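For reference, both new assertions are pure round-trips: validate_identity parses the SQL as BigQuery and asserts the regenerated text is byte-identical. A minimal standalone sketch of the PIVOT check, outside the test harness (assumes sqlglot is installed):

import sqlglot

sql = (
    "SELECT cars, apples FROM some_table "
    "PIVOT(SUM(total_counts) FOR products IN ('general.cars' AS cars, 'food.apples' AS apples))"
)
# Parsing as BigQuery and regenerating should reproduce the input exactly.
assert sqlglot.transpile(sql, read="bigquery", write="bigquery")[0] == sql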
@@ -2346,6 +2349,7 @@ OPTIONS (
self.validate_identity("STRING_AGG(a, ' & ')")
self.validate_identity("STRING_AGG(DISTINCT a, ' & ')")
self.validate_identity("STRING_AGG(a, ' & ' ORDER BY LENGTH(a))")
self.validate_identity("STRING_AGG(foo, b'|' ORDER BY bar)")
self.validate_identity("STRING_AGG(a)", "STRING_AGG(a, ',')")
self.validate_identity(
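The two-argument form of validate_identity asserts a normalization rather than a round-trip: per the assertion above, STRING_AGG(a) regenerates with BigQuery's default ',' separator made explicit. A standalone sketch of both new behaviors:

import sqlglot

# Round-trip of the new byte-string separator form:
sql = "STRING_AGG(foo, b'|' ORDER BY bar)"
assert sqlglot.transpile(sql, read="bigquery", write="bigquery")[0] == sql

# The default separator is made explicit on generation:
assert sqlglot.transpile("STRING_AGG(a)", read="bigquery", write="bigquery")[0] == "STRING_AGG(a, ',')"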

@@ -3,6 +3,7 @@ from sqlglot import exp, parse_one
from sqlglot.dialects import ClickHouse
from sqlglot.expressions import convert
from sqlglot.optimizer import traverse_scope
from sqlglot.optimizer.qualify_columns import quote_identifiers
from tests.dialects.test_dialect import Validator
from sqlglot.errors import ErrorLevel
@@ -11,6 +12,9 @@ class TestClickhouse(Validator):
dialect = "clickhouse"
def test_clickhouse(self):
expr = quote_identifiers(self.parse_one("{start_date:String}"), dialect="clickhouse")
self.assertEqual(expr.sql("clickhouse"), "{start_date: String}")
for string_type_enum in ClickHouse.Generator.STRING_TYPE_MAPPING:
self.validate_identity(f"CAST(x AS {string_type_enum.value})", "CAST(x AS String)")
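The new test pins down that quote_identifiers leaves ClickHouse query-parameter placeholders alone, emitting the canonical space before the type. Equivalent standalone usage:

from sqlglot import parse_one
from sqlglot.optimizer.qualify_columns import quote_identifiers

# Placeholders such as {start_date:String} must not be quoted as identifiers.
expr = quote_identifiers(parse_one("{start_date:String}", read="clickhouse"), dialect="clickhouse")
assert expr.sql("clickhouse") == "{start_date: String}"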

@@ -2326,6 +2326,9 @@ STORAGE_ALLOWED_LOCATIONS=('s3://mybucket1/path1/', 's3://mybucket2/path2/')""",
self.validate_identity(
"""COPY INTO @my_stage/result/data FROM (SELECT * FROM orderstiny) FILE_FORMAT = (TYPE='csv')"""
)
self.validate_identity(
"""COPY INTO MY_DATABASE.MY_SCHEMA.MY_TABLE FROM @MY_DATABASE.MY_SCHEMA.MY_STAGE/my_path FILE_FORMAT = (FORMAT_NAME=MY_DATABASE.MY_SCHEMA.MY_FILE_FORMAT)"""
)
self.validate_all(
"""COPY INTO 's3://example/data.csv'
FROM EXTRA.EXAMPLE.TABLE
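The added identity assertion covers fully qualified stage and named file-format references in Snowflake's COPY INTO. The same round-trip, outside the test harness:

import sqlglot

sql = (
    "COPY INTO MY_DATABASE.MY_SCHEMA.MY_TABLE "
    "FROM @MY_DATABASE.MY_SCHEMA.MY_STAGE/my_path "
    "FILE_FORMAT = (FORMAT_NAME=MY_DATABASE.MY_SCHEMA.MY_FILE_FORMAT)"
)
# Parsing as Snowflake and regenerating should reproduce the input exactly.
assert sqlglot.transpile(sql, read="snowflake", write="snowflake")[0] == sql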

@@ -180,13 +180,13 @@ class TestOptimizer(unittest.TestCase):
actual,
)
if string_to_bool(execute):
with self.subTest(f"(execute) {title}"):
df1 = self.conn.execute(
sqlglot.transpile(sql, read=dialect, write="duckdb")[0]
).df()
df2 = self.conn.execute(optimized.sql(dialect="duckdb")).df()
assert_frame_equal(df1, df2)
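For context, this execute path transpiles the original query to DuckDB, runs both it and the optimized rendering, and compares the resulting dataframes. A self-contained sketch of the same pattern, with a hypothetical table and query standing in for the fixture data and for optimized.sql(dialect="duckdb") (assumes duckdb and pandas are installed):

import duckdb
import sqlglot
from pandas.testing import assert_frame_equal

conn = duckdb.connect()
conn.execute("CREATE TABLE x (a INT, b INT)")
conn.execute("INSERT INTO x VALUES (1, 2), (1, 3), (2, 4)")

sql = "SELECT a, MAX(b) AS b FROM x GROUP BY a ORDER BY a"
optimized_sql = sql  # stand-in for optimized.sql(dialect="duckdb")

# Both renderings must produce identical results on the same data.
df1 = conn.execute(sqlglot.transpile(sql, read="duckdb", write="duckdb")[0]).df()
df2 = conn.execute(optimized_sql).df()
assert_frame_equal(df1, df2)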
@patch("sqlglot.generator.logger")
def test_optimize(self, logger):
@@ -659,6 +659,15 @@ SELECT :with,WITH :expressions,CTE :this,UNION :this,SELECT :expressions,1,:expr
"WITH data AS (SELECT 1 AS id) SELECT FUNC(data.id) AS id FROM data GROUP BY FUNC(data.id)",
)
sql = "SELECT x.a, max(x.b) as x FROM x AS x GROUP BY 1 HAVING x > 1"
self.assertEqual(
optimizer.qualify_columns.qualify_columns(
parse_one(sql, dialect="bigquery"),
schema=MappingSchema(schema=unused_schema, dialect="bigquery"),
).sql(),
"SELECT x.a AS a, MAX(x.b) AS x FROM x AS x GROUP BY 1 HAVING x > 1",
)
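The new assertion checks that qualify_columns, under BigQuery semantics, keeps HAVING x > 1 pointing at the output alias x rather than rewriting it to the column x.b. A standalone sketch; the schema contents here are a hypothetical stand-in for the test's unused_schema fixture:

from sqlglot import parse_one
from sqlglot.optimizer.qualify_columns import qualify_columns
from sqlglot.schema import MappingSchema

schema = MappingSchema(schema={"x": {"a": "INT64", "b": "INT64"}}, dialect="bigquery")
sql = "SELECT x.a, max(x.b) as x FROM x AS x GROUP BY 1 HAVING x > 1"
qualified = qualify_columns(parse_one(sql, dialect="bigquery"), schema=schema)
# The alias reference in HAVING survives qualification untouched.
assert qualified.sql() == "SELECT x.a AS a, MAX(x.b) AS x FROM x AS x GROUP BY 1 HAVING x > 1"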
def test_optimize_joins(self):
self.check_file(
"optimize_joins",
@@ -1425,6 +1434,16 @@ FROM READ_CSV('tests/fixtures/optimizer/tpc-h/nation.csv.gz', 'delimiter', '|')
"""
self.assertEqual(optimizer.optimize(sql).selects[0].type.this, exp.DataType.Type.VARCHAR)
def test_udtf_annotation(self):
table_udtf = parse_one(
"SELECT * FROM TABLE(GENERATOR(ROWCOUNT => 100000))",
read="snowflake",
)
self.assertEqual(
annotate_types(table_udtf, dialect="snowflake").sql("snowflake"),
"SELECT * FROM TABLE(GENERATOR(ROWCOUNT => 100000))",
)
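test_udtf_annotation guards against the type annotator mangling table UDTFs: annotating the Snowflake GENERATOR call must leave the generated SQL unchanged. The same check standalone:

from sqlglot import parse_one
from sqlglot.optimizer.annotate_types import annotate_types

table_udtf = parse_one("SELECT * FROM TABLE(GENERATOR(ROWCOUNT => 100000))", read="snowflake")
annotated = annotate_types(table_udtf, dialect="snowflake")
# Type annotation must not alter the UDTF's rendered SQL.
assert annotated.sql("snowflake") == "SELECT * FROM TABLE(GENERATOR(ROWCOUNT => 100000))"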
def test_recursive_cte(self):
query = parse_one(
"""