Adding upstream version 20.9.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-13 21:18:57 +01:00
parent 943dfc0887
commit 8a068da99c
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
144 changed files with 78309 additions and 59609 deletions

@@ -48,11 +48,8 @@ def _unix_to_time_sql(self: Spark2.Generator, expression: exp.UnixToTime) -> str:
         return f"TIMESTAMP_MILLIS({timestamp})"
     if scale == exp.UnixToTime.MICROS:
         return f"TIMESTAMP_MICROS({timestamp})"
-    if scale == exp.UnixToTime.NANOS:
-        return f"TIMESTAMP_SECONDS({timestamp} / 1000000000)"
-    self.unsupported(f"Unsupported scale for timestamp: {scale}.")
-    return ""
+    return f"TIMESTAMP_SECONDS({timestamp} / POW(10, {scale}))"
 
 
 def _unalias_pivot(expression: exp.Expression) -> exp.Expression:
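The rewritten fallback above replaces the dedicated NANOS branch and the unsupported-scale error with a single POW(10, scale) division. A minimal sketch of the expected effect, assuming the spark2 dialect routes exp.UnixToTime through this generator; the column name ts and the scale value are illustrative, and the printed output is the shape the new branch implies rather than a verified result:

# Sketch only: a nanosecond-scale UnixToTime node, which previously hit the
# removed NANOS branch, now falls through to the generic POW(10, scale)
# rewrite. Assumes the scale argument is a numeric Literal in this version.
from sqlglot import exp

node = exp.UnixToTime(this=exp.column("ts"), scale=exp.Literal.number(9))
print(node.sql(dialect="spark2"))
# Expected shape: TIMESTAMP_SECONDS(ts / POW(10, 9))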
@@ -93,12 +90,7 @@ def _unqualify_pivot_columns(expression: exp.Expression) -> exp.Expression:
         SELECT * FROM tbl PIVOT(SUM(tbl.sales) FOR quarter IN ('Q1', 'Q1'))
     """
     if isinstance(expression, exp.Pivot):
-        expression.args["field"].transform(
-            lambda node: exp.column(node.output_name, quoted=node.this.quoted)
-            if isinstance(node, exp.Column)
-            else node,
-            copy=False,
-        )
+        expression.set("field", transforms.unqualify_columns(expression.args["field"]))
 
     return expression
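Instead of unqualifying the PIVOT field with an ad-hoc lambda, the hook now delegates to transforms.unqualify_columns. A small sketch, not taken from the commit, of what that helper does to a qualified column; the input string and expected output are illustrative:

# Sketch only: transforms.unqualify_columns strips table qualifiers from the
# Column nodes in a subtree, which is what the removed lambda did by hand.
from sqlglot import parse_one, transforms

field = parse_one("tbl.quarter")  # parses as a Column qualified by a table
print(transforms.unqualify_columns(field).sql())
# Expected: quarter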
@@ -234,7 +226,7 @@ class Spark2(Hive):
         def struct_sql(self, expression: exp.Struct) -> str:
             args = []
             for arg in expression.expressions:
-                if isinstance(arg, self.KEY_VALUE_DEFINITONS):
+                if isinstance(arg, self.KEY_VALUE_DEFINITIONS):
                     if isinstance(arg, exp.Bracket):
                         args.append(exp.alias_(arg.this, arg.expressions[0].name))
                     else:
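The last hunk only picks up the corrected spelling of the generator's KEY_VALUE_DEFINITIONS constant. A hedged sketch of the code path it guards, assuming a DuckDB struct literal as the source and that spark2 renders key/value pairs as aliased STRUCT arguments; the exact output text is an assumption:

# Sketch only: struct_sql rewrites key/value struct arguments into aliased
# expressions, so a DuckDB-style struct literal should come out as STRUCT(... AS ...).
import sqlglot

print(sqlglot.transpile("SELECT {'x': 1, 'y': 2} AS s", read="duckdb", write="spark2")[0])
# Expected shape: SELECT STRUCT(1 AS x, 2 AS y) AS s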