Adding upstream version 15.0.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent 70d5d3451a
commit bb75596aa9
167 changed files with 58268 additions and 51337 deletions
@@ -147,13 +147,6 @@ def _to_date_sql(self: generator.Generator, expression: exp.TsOrDsToDate) -> str
    return f"TO_DATE({this})"


def _index_sql(self: generator.Generator, expression: exp.Index) -> str:
    this = self.sql(expression, "this")
    table = self.sql(expression, "table")
    columns = self.sql(expression, "columns")
    return f"{this} ON TABLE {table} {columns}"


class Hive(Dialect):
    alias_post_tablesample = True
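A quick, hedged check of my own (not part of the commit): the _to_date_sql helper shown above is what prints a parsed exp.TsOrDsToDate node back out as Hive's TO_DATE(). The module-level _index_sql helper shown next to it covers the "<index> ON TABLE <table>" tail that the INDEX_ON = "ON TABLE" generator constant later in this diff also handles. Assuming sqlglot 15.x is installed, a round trip through the Hive dialect should leave the TO_DATE call untouched; the output comment is indicative only.

import sqlglot

print(sqlglot.transpile("SELECT TO_DATE(ds)", read="hive", write="hive")[0])
# indicative output: SELECT TO_DATE(ds)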
@@ -225,8 +218,7 @@ class Hive(Dialect):
        STRICT_CAST = False

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "APPROX_COUNT_DISTINCT": exp.ApproxDistinct.from_arg_list,
            **parser.Parser.FUNCTIONS,
            "BASE64": exp.ToBase64.from_arg_list,
            "COLLECT_LIST": exp.ArrayAgg.from_arg_list,
            "DATE_ADD": lambda args: exp.TsOrDsAdd(
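A hedged usage sketch of my own (not from the commit): the FUNCTIONS table above is what lets the Hive parser map functions such as COLLECT_LIST onto dialect-agnostic nodes like exp.ArrayAgg, which other dialects can then re-render in their own syntax. Assumes sqlglot 15.x; the duckdb output is indicative.

import sqlglot
from sqlglot import exp

tree = sqlglot.parse_one("SELECT COLLECT_LIST(name) FROM t", read="hive")
print(tree.find(exp.ArrayAgg) is not None)  # True: COLLECT_LIST parsed as ArrayAgg
print(tree.sql(dialect="duckdb"))           # e.g. SELECT ARRAY_AGG(name) FROM t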
@@ -271,21 +263,29 @@ class Hive(Dialect):
        }

        PROPERTY_PARSERS = {
            **parser.Parser.PROPERTY_PARSERS,  # type: ignore
            **parser.Parser.PROPERTY_PARSERS,
            "WITH SERDEPROPERTIES": lambda self: exp.SerdeProperties(
                expressions=self._parse_wrapped_csv(self._parse_property)
            ),
        }

        QUERY_MODIFIER_PARSERS = {
            **parser.Parser.QUERY_MODIFIER_PARSERS,
            "distribute": lambda self: self._parse_sort(exp.Distribute, "DISTRIBUTE", "BY"),
            "sort": lambda self: self._parse_sort(exp.Sort, "SORT", "BY"),
            "cluster": lambda self: self._parse_sort(exp.Cluster, "CLUSTER", "BY"),
        }

    class Generator(generator.Generator):
        LIMIT_FETCH = "LIMIT"
        TABLESAMPLE_WITH_METHOD = False
        TABLESAMPLE_SIZE_IS_PERCENT = True
        JOIN_HINTS = False
        TABLE_HINTS = False
        INDEX_ON = "ON TABLE"

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,  # type: ignore
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.TEXT: "STRING",
            exp.DataType.Type.DATETIME: "TIMESTAMP",
            exp.DataType.Type.VARBINARY: "BINARY",
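A hedged round-trip check of my own (not from the commit): the QUERY_MODIFIER_PARSERS entries above teach the Hive parser DISTRIBUTE BY / SORT BY / CLUSTER BY, and the after_having_modifiers override at the end of this diff prints them back out, while TYPE_MAPPING handles renames such as TEXT to STRING. Assumes sqlglot 15.x; outputs are indicative.

import sqlglot

sql = "SELECT x FROM t DISTRIBUTE BY x SORT BY x"
print(sqlglot.transpile(sql, read="hive", write="hive")[0])
# indicative output: SELECT x FROM t DISTRIBUTE BY x SORT BY x

print(sqlglot.transpile("CAST(x AS TEXT)", write="hive")[0])
# indicative output: CAST(x AS STRING)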
@@ -294,7 +294,7 @@ class Hive(Dialect):
        }

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            **generator.Generator.TRANSFORMS,
            exp.Group: transforms.preprocess([transforms.unalias_group]),
            exp.Select: transforms.preprocess(
                [
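A hedged illustration of my own (not from the commit): transforms.unalias_group, wired into exp.Group above, rewrites GROUP BY references to select aliases, which Hive does not reliably resolve, into positional references. Assumes sqlglot 15.x; output is indicative.

import sqlglot

print(sqlglot.transpile("SELECT a + 1 AS b FROM t GROUP BY b", write="hive")[0])
# indicative output: SELECT a + 1 AS b FROM t GROUP BY 1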
@@ -319,7 +319,6 @@ class Hive(Dialect):
            exp.FileFormatProperty: lambda self, e: f"STORED AS {self.sql(e, 'this') if isinstance(e.this, exp.InputOutputFormat) else e.name.upper()}",
            exp.FromBase64: rename_func("UNBASE64"),
            exp.If: if_sql,
            exp.Index: _index_sql,
            exp.ILike: no_ilike_sql,
            exp.JSONExtract: rename_func("GET_JSON_OBJECT"),
            exp.JSONExtractScalar: rename_func("GET_JSON_OBJECT"),
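A hedged example of my own (not from the commit): the JSONExtract and JSONExtractScalar entries above both come out as Hive's GET_JSON_OBJECT. Assumes sqlglot 15.x; the Presto-flavoured input is just one way to obtain a JSONExtract node, and the output is indicative.

import sqlglot

sql = "SELECT JSON_EXTRACT(payload, '$.user.id') FROM logs"
print(sqlglot.transpile(sql, read="presto", write="hive")[0])
# indicative output: SELECT GET_JSON_OBJECT(payload, '$.user.id') FROM logs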
@@ -342,7 +341,6 @@ class Hive(Dialect):
            exp.StrToTime: _str_to_time_sql,
            exp.StrToUnix: _str_to_unix_sql,
            exp.StructExtract: struct_extract_sql,
            exp.TableFormatProperty: lambda self, e: f"USING {self.sql(e, 'this')}",
            exp.TimeStrToDate: rename_func("TO_DATE"),
            exp.TimeStrToTime: timestrtotime_sql,
            exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"),
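A hedged note of my own on the rename_func entries above (not from the commit): rename_func simply re-emits a node under a different function name, so a TimeStrToUnix node should print as UNIX_TIMESTAMP(...). Assumes sqlglot 15.x; output is indicative.

from sqlglot import exp

node = exp.TimeStrToUnix(this=exp.column("event_time"))
print(node.sql(dialect="hive"))
# indicative output: UNIX_TIMESTAMP(event_time)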
@@ -363,14 +361,13 @@ class Hive(Dialect):
            exp.SerdeProperties: lambda self, e: self.properties(e, prefix="WITH SERDEPROPERTIES"),
            exp.NumberToStr: rename_func("FORMAT_NUMBER"),
            exp.LastDateOfMonth: rename_func("LAST_DAY"),
            exp.National: lambda self, e: self.sql(e, "this"),
            exp.National: lambda self, e: self.national_sql(e, prefix=""),
        }

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
            **generator.Generator.PROPERTIES_LOCATION,
            exp.FileFormatProperty: exp.Properties.Location.POST_SCHEMA,
            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
            exp.TableFormatProperty: exp.Properties.Location.POST_SCHEMA,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }
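A hedged illustration of my own (not from the commit): PROPERTIES_LOCATION above pins where each table property is printed, and POST_SCHEMA keeps PARTITIONED BY and STORED AS after the column list in CREATE TABLE. Assumes sqlglot 15.x; output is indicative.

import sqlglot

ddl = "CREATE TABLE t (a INT) PARTITIONED BY (ds STRING) STORED AS PARQUET"
print(sqlglot.transpile(ddl, read="hive", write="hive")[0])
# indicative output: CREATE TABLE t (a INT) PARTITIONED BY (ds STRING) STORED AS PARQUET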
@@ -396,3 +393,10 @@ class Hive(Dialect):
                expression = exp.DataType.build(expression.this)

            return super().datatype_sql(expression)

        def after_having_modifiers(self, expression: exp.Expression) -> t.List[str]:
            return super().after_having_modifiers(expression) + [
                self.sql(expression, "distribute"),
                self.sql(expression, "sort"),
                self.sql(expression, "cluster"),
            ]
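A hedged example of my own (not from the commit): the return super().datatype_sql(expression) line above closes Hive's datatype_sql override; in sqlglot 15.x my understanding is that a parameterless VARCHAR falls back to Hive's STRING while a sized VARCHAR keeps its length, though part of that logic sits outside this hunk. Outputs are indicative.

import sqlglot

print(sqlglot.transpile("CAST(x AS VARCHAR)", write="hive")[0])      # e.g. CAST(x AS STRING)
print(sqlglot.transpile("CAST(x AS VARCHAR(10))", write="hive")[0])  # e.g. CAST(x AS VARCHAR(10))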