
Adding upstream version 18.2.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Author: Daniel Baumann 2025-02-13 20:56:33 +01:00
parent 9de781a59b
commit ab14e550ff
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
124 changed files with 60313 additions and 50346 deletions

sqlglot/dialects/spark2.py

@@ -15,7 +15,7 @@ from sqlglot.dialects.hive import Hive
 from sqlglot.helper import seq_get
 
-def _create_sql(self: Hive.Generator, e: exp.Create) -> str:
+def _create_sql(self: Spark2.Generator, e: exp.Create) -> str:
     kind = e.args["kind"]
     properties = e.args.get("properties")
@@ -31,17 +31,21 @@ def _create_sql(self: Hive.Generator, e: exp.Create) -> str:
     return create_with_partitions_sql(self, e)
 
 
-def _map_sql(self: Hive.Generator, expression: exp.Map) -> str:
-    keys = self.sql(expression.args["keys"])
-    values = self.sql(expression.args["values"])
-    return f"MAP_FROM_ARRAYS({keys}, {values})"
+def _map_sql(self: Spark2.Generator, expression: exp.Map) -> str:
+    keys = expression.args.get("keys")
+    values = expression.args.get("values")
+
+    if not keys or not values:
+        return "MAP()"
+
+    return f"MAP_FROM_ARRAYS({self.sql(keys)}, {self.sql(values)})"
 
 
 def _parse_as_cast(to_type: str) -> t.Callable[[t.List], exp.Expression]:
     return lambda args: exp.Cast(this=seq_get(args, 0), to=exp.DataType.build(to_type))
 
 
-def _str_to_date(self: Hive.Generator, expression: exp.StrToDate) -> str:
+def _str_to_date(self: Spark2.Generator, expression: exp.StrToDate) -> str:
     this = self.sql(expression, "this")
     time_format = self.format_time(expression)
     if time_format == Hive.DATE_FORMAT:
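The rewritten _map_sql above guards against an exp.Map node with missing keys or values, which the old code crashed on with a KeyError. A minimal sketch of the effect, assuming sqlglot 18.2.0 and its public expression helpers (the expected outputs are assumptions, not captured results):

import sqlglot
from sqlglot import exp

# An empty map expression now renders as MAP() for Spark 2 instead of
# raising KeyError on expression.args["keys"].
print(exp.Map().sql(dialect="spark2"))  # expected (assumption): MAP()

# With both arguments present, generation is unchanged.
m = exp.Map(
    keys=exp.array(exp.Literal.string("a")),
    values=exp.array(exp.Literal.number(1)),
)
print(m.sql(dialect="spark2"))
# expected (assumption): MAP_FROM_ARRAYS(ARRAY('a'), ARRAY(1))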
@@ -49,7 +53,7 @@ def _str_to_date(self: Hive.Generator, expression: exp.StrToDate) -> str:
     return f"TO_DATE({this}, {time_format})"
 
 
-def _unix_to_time_sql(self: Hive.Generator, expression: exp.UnixToTime) -> str:
+def _unix_to_time_sql(self: Spark2.Generator, expression: exp.UnixToTime) -> str:
     scale = expression.args.get("scale")
     timestamp = self.sql(expression, "this")
     if scale is None:
@@ -110,6 +114,13 @@ def _unqualify_pivot_columns(expression: exp.Expression) -> exp.Expression:
     return expression
 
 
+def _insert_sql(self: Spark2.Generator, expression: exp.Insert) -> str:
+    if expression.expression.args.get("with"):
+        expression = expression.copy()
+        expression.set("with", expression.expression.args.pop("with"))
+    return self.insert_sql(expression)
+
+
 class Spark2(Hive):
     class Parser(Hive.Parser):
         FUNCTIONS = {
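The new _insert_sql hook works around Spark's requirement that a WITH clause precede INSERT: if the inserted query carries its own CTEs, they are popped off and re-attached to the Insert node before delegating to the stock insert_sql. A hedged round-trip sketch (the exact rendering is an assumption):

import sqlglot

sql = "INSERT INTO t WITH cte AS (SELECT 1 AS x) SELECT x FROM cte"
# The CTE parsed as part of the source query is hoisted above the INSERT.
print(sqlglot.transpile(sql, read="spark", write="spark2")[0])
# expected (assumption): WITH cte AS (SELECT 1 AS x) INSERT INTO t SELECT x FROM cte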
@@ -169,10 +180,7 @@ class Spark2(Hive):
     class Generator(Hive.Generator):
         QUERY_HINTS = True
-        TYPE_MAPPING = {
-            **Hive.Generator.TYPE_MAPPING,
-        }
         NVL2_SUPPORTED = True
 
         PROPERTIES_LOCATION = {
             **Hive.Generator.PROPERTIES_LOCATION,
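Dropping the TYPE_MAPPING override is a pure cleanup: the deleted dict only re-spread Hive's mapping, and with no override Python resolves the attribute through the class hierarchy anyway. A small sanity check of that assumption:

from sqlglot.dialects.hive import Hive
from sqlglot.dialects.spark2 import Spark2

# Assumption: nothing else mutates TYPE_MAPPING, so Spark2 now inherits
# Hive's mapping verbatim via ordinary attribute lookup.
assert Spark2.Generator.TYPE_MAPPING == Hive.Generator.TYPE_MAPPING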
@@ -197,6 +205,7 @@ class Spark2(Hive):
             exp.DayOfYear: rename_func("DAYOFYEAR"),
             exp.FileFormatProperty: lambda self, e: f"USING {e.name.upper()}",
             exp.From: transforms.preprocess([_unalias_pivot]),
+            exp.Insert: _insert_sql,
             exp.LogicalAnd: rename_func("BOOL_AND"),
             exp.LogicalOr: rename_func("BOOL_OR"),
             exp.Map: _map_sql,
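TRANSFORMS is the dispatch table the generator consults per expression type, so registering exp.Insert: _insert_sql is what activates the CTE hoisting shown earlier. For illustration, a hedged example of a neighboring entry in action:

from sqlglot import exp

# exp.LogicalAnd is registered with rename_func("BOOL_AND"), so the
# aggregate is simply re-spelled when generating Spark 2 SQL.
print(exp.LogicalAnd(this=exp.column("b")).sql(dialect="spark2"))
# expected (assumption): BOOL_AND(b)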