Adding upstream version 21.0.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent
07f4660f31
commit
91f2cef5f0
115 changed files with 66603 additions and 60920 deletions
|
@ -192,6 +192,18 @@ def _to_date_sql(self: Hive.Generator, expression: exp.TsOrDsToDate) -> str:
|
|||
return f"TO_DATE({this})"
|
||||
|
||||
|
||||
def _parse_ignore_nulls(
    exp_class: t.Type[exp.Expression],
) -> t.Callable[[t.List[exp.Expression]], exp.Expression]:
    """Build an arg-list parser for functions taking an optional IGNORE NULLS flag.

    The returned callable wraps the first argument in ``exp_class``; when the
    second argument parses to boolean TRUE, the result is additionally wrapped
    in ``exp.IgnoreNulls`` (e.g. Hive's FIRST(x, true) / LAST(x, true)).
    """

    def _parse(args: t.List[exp.Expression]) -> exp.Expression:
        wrapped = exp_class(this=seq_get(args, 0))

        # The optional second argument is the "ignore nulls" boolean flag.
        if seq_get(args, 1) == exp.true():
            return exp.IgnoreNulls(this=wrapped)

        return wrapped

    return _parse
|
||||
|
||||
|
||||
class Hive(Dialect):
|
||||
ALIAS_POST_TABLESAMPLE = True
|
||||
IDENTIFIERS_CAN_START_WITH_DIGIT = True
|
||||
|
@ -298,8 +310,12 @@ class Hive(Dialect):
|
|||
expression=exp.TsOrDsToDate(this=seq_get(args, 1)),
|
||||
),
|
||||
"DAY": lambda args: exp.Day(this=exp.TsOrDsToDate(this=seq_get(args, 0))),
|
||||
"FIRST": _parse_ignore_nulls(exp.First),
|
||||
"FIRST_VALUE": _parse_ignore_nulls(exp.FirstValue),
|
||||
"FROM_UNIXTIME": format_time_lambda(exp.UnixToStr, "hive", True),
|
||||
"GET_JSON_OBJECT": exp.JSONExtractScalar.from_arg_list,
|
||||
"LAST": _parse_ignore_nulls(exp.Last),
|
||||
"LAST_VALUE": _parse_ignore_nulls(exp.LastValue),
|
||||
"LOCATE": locate_to_strposition,
|
||||
"MAP": parse_var_map,
|
||||
"MONTH": lambda args: exp.Month(this=exp.TsOrDsToDate.from_arg_list(args)),
|
||||
|
@ -429,6 +445,7 @@ class Hive(Dialect):
|
|||
EXTRACT_ALLOWS_QUOTES = False
|
||||
NVL2_SUPPORTED = False
|
||||
LAST_DAY_SUPPORTS_DATE_PART = False
|
||||
JSON_PATH_SINGLE_QUOTE_ESCAPE = True
|
||||
|
||||
EXPRESSIONS_WITHOUT_NESTED_CTES = {
|
||||
exp.Insert,
|
||||
|
@ -437,6 +454,13 @@ class Hive(Dialect):
|
|||
exp.Union,
|
||||
}
|
||||
|
||||
SUPPORTED_JSON_PATH_PARTS = {
|
||||
exp.JSONPathKey,
|
||||
exp.JSONPathRoot,
|
||||
exp.JSONPathSubscript,
|
||||
exp.JSONPathWildcard,
|
||||
}
|
||||
|
||||
TYPE_MAPPING = {
|
||||
**generator.Generator.TYPE_MAPPING,
|
||||
exp.DataType.Type.BIT: "BOOLEAN",
|
||||
|
@ -471,9 +495,12 @@ class Hive(Dialect):
|
|||
exp.DateDiff: _date_diff_sql,
|
||||
exp.DateStrToDate: datestrtodate_sql,
|
||||
exp.DateSub: _add_date_sql,
|
||||
exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Hive.DATEINT_FORMAT}) AS INT)",
|
||||
exp.DiToDate: lambda self, e: f"TO_DATE(CAST({self.sql(e, 'this')} AS STRING), {Hive.DATEINT_FORMAT})",
|
||||
exp.FileFormatProperty: lambda self, e: f"STORED AS {self.sql(e, 'this') if isinstance(e.this, exp.InputOutputFormat) else e.name.upper()}",
|
||||
exp.DateToDi: lambda self,
|
||||
e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Hive.DATEINT_FORMAT}) AS INT)",
|
||||
exp.DiToDate: lambda self,
|
||||
e: f"TO_DATE(CAST({self.sql(e, 'this')} AS STRING), {Hive.DATEINT_FORMAT})",
|
||||
exp.FileFormatProperty: lambda self,
|
||||
e: f"STORED AS {self.sql(e, 'this') if isinstance(e.this, exp.InputOutputFormat) else e.name.upper()}",
|
||||
exp.FromBase64: rename_func("UNBASE64"),
|
||||
exp.If: if_sql(),
|
||||
exp.ILike: no_ilike_sql,
|
||||
|
@ -502,7 +529,8 @@ class Hive(Dialect):
|
|||
exp.SafeDivide: no_safe_divide_sql,
|
||||
exp.SchemaCommentProperty: lambda self, e: self.naked_property(e),
|
||||
exp.ArrayUniqueAgg: rename_func("COLLECT_SET"),
|
||||
exp.Split: lambda self, e: f"SPLIT({self.sql(e, 'this')}, CONCAT('\\\\Q', {self.sql(e, 'expression')}))",
|
||||
exp.Split: lambda self,
|
||||
e: f"SPLIT({self.sql(e, 'this')}, CONCAT('\\\\Q', {self.sql(e, 'expression')}))",
|
||||
exp.StrPosition: strposition_to_locate_sql,
|
||||
exp.StrToDate: _str_to_date_sql,
|
||||
exp.StrToTime: _str_to_time_sql,
|
||||
|
@ -514,7 +542,8 @@ class Hive(Dialect):
|
|||
exp.TimeToStr: _time_to_str,
|
||||
exp.TimeToUnix: rename_func("UNIX_TIMESTAMP"),
|
||||
exp.ToBase64: rename_func("BASE64"),
|
||||
exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS STRING), '-', ''), 1, 8) AS INT)",
|
||||
exp.TsOrDiToDi: lambda self,
|
||||
e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS STRING), '-', ''), 1, 8) AS INT)",
|
||||
exp.TsOrDsAdd: _add_date_sql,
|
||||
exp.TsOrDsDiff: _date_diff_sql,
|
||||
exp.TsOrDsToDate: _to_date_sql,
|
||||
|
@ -528,8 +557,10 @@ class Hive(Dialect):
|
|||
exp.SerdeProperties: lambda self, e: self.properties(e, prefix="WITH SERDEPROPERTIES"),
|
||||
exp.NumberToStr: rename_func("FORMAT_NUMBER"),
|
||||
exp.National: lambda self, e: self.national_sql(e, prefix=""),
|
||||
exp.ClusteredColumnConstraint: lambda self, e: f"({self.expressions(e, 'this', indent=False)})",
|
||||
exp.NonClusteredColumnConstraint: lambda self, e: f"({self.expressions(e, 'this', indent=False)})",
|
||||
exp.ClusteredColumnConstraint: lambda self,
|
||||
e: f"({self.expressions(e, 'this', indent=False)})",
|
||||
exp.NonClusteredColumnConstraint: lambda self,
|
||||
e: f"({self.expressions(e, 'this', indent=False)})",
|
||||
exp.NotForReplicationColumnConstraint: lambda self, e: "",
|
||||
exp.OnProperty: lambda self, e: "",
|
||||
exp.PrimaryKeyColumnConstraint: lambda self, e: "PRIMARY KEY",
|
||||
|
@ -543,6 +574,13 @@ class Hive(Dialect):
|
|||
exp.WithDataProperty: exp.Properties.Location.UNSUPPORTED,
|
||||
}
|
||||
|
||||
def _jsonpathkey_sql(self, expression: exp.JSONPathKey) -> str:
    """Render a JSON path key, rejecting wildcard keys (not expressible in Hive)."""
    # Guard clause: delegate the supported (non-wildcard) case to the base class.
    if not isinstance(expression.this, exp.JSONPathWildcard):
        return super()._jsonpathkey_sql(expression)

    self.unsupported("Unsupported wildcard in JSONPathKey expression")
    return ""
|
||||
|
||||
def temporary_storage_provider(self, expression: exp.Create) -> exp.Create:
    """Return ``expression`` unchanged.

    Hive has no notion of a temporary storage provider to rewrite the CREATE
    statement for (temporary-table behavior is driven by hive settings instead).
    """
    return expression
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue