
Merging upstream version 25.29.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Author: Daniel Baumann
Date: 2025-02-13 21:56:19 +01:00
Commit: 1e53504dfc (parent de8c8a17d0)
Signed by: daniel (GPG key ID: FBB4F0E80A80222F)
97 changed files with 64720 additions and 61752 deletions


@@ -433,6 +433,9 @@ class Generator(metaclass=_Generator):
     # Whether CONVERT_TIMEZONE() is supported; if not, it will be generated as exp.AtTimeZone
     SUPPORTS_CONVERT_TIMEZONE = False
 
+    # Whether MEDIAN(expr) is supported; if not, it will be generated as PERCENTILE_CONT(expr, 0.5)
+    SUPPORTS_MEDIAN = True
+
     # The name to generate for the JSONPath expression. If `None`, only `this` will be generated
     PARSE_JSON_NAME: t.Optional[str] = "PARSE_JSON"
 
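Note on this hunk: SUPPORTS_MEDIAN works like the other capability flags above it. A dialect whose generator sets it to False gets the PERCENTILE_CONT(expr, 0.5) rewrite via the median_sql method added at the bottom of this diff. A minimal sketch of that behaviour, assuming sqlglot 25.29.0 (where exp.Median is introduced); NoMedianGenerator is a hypothetical subclass used only for illustration, and the expected output is shown in the comments:

    from sqlglot import exp
    from sqlglot.generator import Generator

    class NoMedianGenerator(Generator):
        # Hypothetical subclass; real dialects set this flag on their own Generator.
        SUPPORTS_MEDIAN = False

    median = exp.Median(this=exp.column("x"))
    print(Generator().generate(median))          # MEDIAN(x)
    print(NoMedianGenerator().generate(median))  # PERCENTILE_CONT(x, 0.5)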
@@ -2314,8 +2317,10 @@ class Generator(metaclass=_Generator):
         step_sql = self.sql(expression, "step")
         step_sql = f" STEP {step_sql}" if step_sql else ""
         interpolated_values = [
-            f"{self.sql(named_expression, 'alias')} AS {self.sql(named_expression, 'this')}"
-            for named_expression in expression.args.get("interpolate") or []
+            f"{self.sql(e, 'alias')} AS {self.sql(e, 'this')}"
+            if isinstance(e, exp.Alias)
+            else self.sql(e, "this")
+            for e in expression.args.get("interpolate") or []
         ]
         interpolate = (
             f" INTERPOLATE ({', '.join(interpolated_values)})" if interpolated_values else ""
@@ -4362,20 +4367,25 @@ class Generator(metaclass=_Generator):
     def arrayagg_sql(self, expression: exp.ArrayAgg) -> str:
         array_agg = self.function_fallback_sql(expression)
 
         # Add a NULL FILTER on the column to mimic the results going from a dialect that excludes nulls
         # on ARRAY_AGG (e.g Spark) to one that doesn't (e.g. DuckDB)
         if self.dialect.ARRAY_AGG_INCLUDES_NULLS and expression.args.get("nulls_excluded"):
             parent = expression.parent
             if isinstance(parent, exp.Filter):
                 parent_cond = parent.expression.this
                 parent_cond.replace(parent_cond.and_(expression.this.is_(exp.null()).not_()))
             else:
-                # DISTINCT is already present in the agg function, do not propagate it to FILTER as well
                 this = expression.this
-                this_sql = (
-                    self.expressions(this)
-                    if isinstance(this, exp.Distinct)
-                    else self.sql(expression, "this")
-                )
-
-                array_agg = f"{array_agg} FILTER(WHERE {this_sql} IS NOT NULL)"
+
+                # Do not add the filter if the input is not a column (e.g. literal, struct etc)
+                if this.find(exp.Column):
+                    # DISTINCT is already present in the agg function, do not propagate it to FILTER as well
+                    this_sql = (
+                        self.expressions(this)
+                        if isinstance(this, exp.Distinct)
+                        else self.sql(expression, "this")
+                    )
+
+                    array_agg = f"{array_agg} FILTER(WHERE {this_sql} IS NOT NULL)"
 
         return array_agg
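Note on this hunk: the compensating IS NOT NULL filter is still added when going from a dialect whose ARRAY_AGG drops NULLs (e.g. Spark) to one whose ARRAY_AGG keeps them (e.g. DuckDB), but only when the aggregated input actually contains a column; literals, structs etc. are now left untouched. A sketch of both cases, assuming sqlglot 25.29.0 (the exact generated SQL may differ slightly):

    import sqlglot

    # Column input: still gets the NULL filter when transpiled Spark -> DuckDB,
    # e.g. ARRAY_AGG(x) FILTER(WHERE x IS NOT NULL).
    print(sqlglot.transpile("SELECT ARRAY_AGG(x) FROM t", read="spark", write="duckdb")[0])

    # Non-column input (a literal): with the new guard, no FILTER is added.
    print(sqlglot.transpile("SELECT ARRAY_AGG(1) FROM t", read="spark", write="duckdb")[0])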
@@ -4434,3 +4444,31 @@ class Generator(metaclass=_Generator):
     @unsupported_args("format")
     def todouble_sql(self, expression: exp.ToDouble) -> str:
         return self.sql(exp.cast(expression.this, exp.DataType.Type.DOUBLE))
+
+    def string_sql(self, expression: exp.String) -> str:
+        this = expression.this
+        zone = expression.args.get("zone")
+
+        if zone:
+            # This is a BigQuery specific argument for STRING(<timestamp_expr>, <time_zone>)
+            # BigQuery stores timestamps internally as UTC, so ConvertTimezone is used with UTC
+            # set for source_tz to transpile the time conversion before the STRING cast
+            this = exp.ConvertTimezone(
+                source_tz=exp.Literal.string("UTC"), target_tz=zone, timestamp=this
+            )
+
+        return self.sql(exp.cast(this, exp.DataType.Type.VARCHAR))
+
+    def median_sql(self, expression: exp.Median):
+        if not self.SUPPORTS_MEDIAN:
+            return self.sql(
+                exp.PercentileCont(this=expression.this, expression=exp.Literal.number(0.5))
+            )
+
+        return self.function_fallback_sql(expression)
+
+    def overflowtruncatebehavior_sql(self, expression: exp.OverflowTruncateBehavior) -> str:
+        filler = self.sql(expression, "this")
+        filler = f" {filler}" if filler else ""
+        with_count = "WITH COUNT" if expression.args.get("with_count") else "WITHOUT COUNT"
+
+        return f"TRUNCATE{filler} {with_count}"