Adding upstream version 25.30.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent c61927f460
commit 44a4f87ffd
69 changed files with 48139 additions and 46098 deletions
sqlglot/dialects/snowflake.py

@@ -235,6 +235,61 @@ def _unnest_generate_date_array(expression: exp.Expression) -> exp.Expression:
    return expression


def _build_regexp_extract(expr_type: t.Type[E]) -> t.Callable[[t.List], E]:
    def _builder(args: t.List) -> E:
        return expr_type(
            this=seq_get(args, 0),
            expression=seq_get(args, 1),
            position=seq_get(args, 2),
            occurrence=seq_get(args, 3),
            parameters=seq_get(args, 4),
            group=seq_get(args, 5) or exp.Literal.number(0),
        )

    return _builder


def _regexpextract_sql(self, expression: exp.RegexpExtract | exp.RegexpExtractAll) -> str:
    # Other dialects don't support all of the following parameters, so we need to
    # generate default values as necessary to ensure the transpilation is correct
    group = expression.args.get("group")

    # To avoid generating all these default values, we set group to None if
    # it's 0 (also default value) which doesn't trigger the following chain
    if group and group.name == "0":
        group = None

    parameters = expression.args.get("parameters") or (group and exp.Literal.string("c"))
    occurrence = expression.args.get("occurrence") or (parameters and exp.Literal.number(1))
    position = expression.args.get("position") or (occurrence and exp.Literal.number(1))

    return self.func(
        "REGEXP_SUBSTR" if isinstance(expression, exp.RegexpExtract) else "REGEXP_EXTRACT_ALL",
        expression.this,
        expression.expression,
        position,
        occurrence,
        parameters,
        group,
    )


def _json_extract_value_array_sql(
    self: Snowflake.Generator, expression: exp.JSONValueArray | exp.JSONExtractArray
) -> str:
    json_extract = exp.JSONExtract(this=expression.this, expression=expression.expression)
    ident = exp.to_identifier("x")

    if isinstance(expression, exp.JSONValueArray):
        this: exp.Expression = exp.cast(ident, to=exp.DataType.Type.VARCHAR)
    else:
        this = exp.ParseJSON(this=f"TO_JSON({ident})")

    transform_lambda = exp.Lambda(expressions=[ident], this=this)

    return self.func("TRANSFORM", json_extract, transform_lambda)


class Snowflake(Dialect):
    # https://docs.snowflake.com/en/sql-reference/identifiers-syntax
    NORMALIZATION_STRATEGY = NormalizationStrategy.UPPERCASE
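The comment block in _regexpextract_sql describes a chained defaulting scheme: each trailing REGEXP_SUBSTR argument is only emitted when some later argument is present. A minimal standalone sketch of that chain, using plain Python values in place of exp.Literal nodes (illustrative only, not part of the commit):

    # Sketch of the defaulting chain: plain values stand in for expression nodes.
    def default_chain(position=None, occurrence=None, parameters=None, group=None):
        if group == 0:  # 0 is the default group, so it doesn't force the chain
            group = None
        parameters = parameters or (group and "c")
        occurrence = occurrence or (parameters and 1)
        position = position or (occurrence and 1)
        return [arg for arg in (position, occurrence, parameters, group) if arg is not None]

    # A two-argument call stays two-argument...
    assert default_chain() == []
    # ...but asking for capture group 2 back-fills position, occurrence and parameters.
    assert default_chain(group=2) == [1, 1, "c", 2]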
@@ -322,6 +377,9 @@ class Snowflake(Dialect):
            "DATEADD": _build_date_time_add(exp.DateAdd),
            "DATEDIFF": _build_datediff,
            "DIV0": _build_if_from_div0,
            "EDITDISTANCE": lambda args: exp.Levenshtein(
                this=seq_get(args, 0), expression=seq_get(args, 1), max_dist=seq_get(args, 2)
            ),
            "FLATTEN": exp.Explode.from_arg_list,
            "GET_PATH": lambda args, dialect: exp.JSONExtract(
                this=seq_get(args, 0), expression=dialect.to_json_path(seq_get(args, 1))
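The new EDITDISTANCE builder keeps the optional cost cap as max_dist on the Levenshtein node. A hypothetical check against sqlglot's public parse_one API, not part of the commit, might look like this:

    import sqlglot
    from sqlglot import exp

    # Illustrative only: Snowflake's three-argument EDITDISTANCE should parse
    # with the cap stored under the "max_dist" arg.
    node = sqlglot.parse_one("EDITDISTANCE(a, b, 3)", read="snowflake")
    assert isinstance(node, exp.Levenshtein)
    assert node.args["max_dist"].name == "3"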
@@ -335,15 +393,10 @@ class Snowflake(Dialect):
            "LISTAGG": exp.GroupConcat.from_arg_list,
            "NULLIFZERO": _build_if_from_nullifzero,
            "OBJECT_CONSTRUCT": _build_object_construct,
            "REGEXP_EXTRACT_ALL": _build_regexp_extract(exp.RegexpExtractAll),
            "REGEXP_REPLACE": _build_regexp_replace,
            "REGEXP_SUBSTR": lambda args: exp.RegexpExtract(
                this=seq_get(args, 0),
                expression=seq_get(args, 1),
                position=seq_get(args, 2),
                occurrence=seq_get(args, 3),
                parameters=seq_get(args, 4),
                group=seq_get(args, 5) or exp.Literal.number(0),
            ),
            "REGEXP_SUBSTR": _build_regexp_extract(exp.RegexpExtract),
            "REGEXP_SUBSTR_ALL": _build_regexp_extract(exp.RegexpExtractAll),
            "RLIKE": exp.RegexpLike.from_arg_list,
            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
            "TIMEADD": _build_date_time_add(exp.TimeAdd),
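With this mapping, the inline REGEXP_SUBSTR lambda is replaced and REGEXP_SUBSTR, REGEXP_SUBSTR_ALL and REGEXP_EXTRACT_ALL all share the _build_regexp_extract builder. A hedged sketch of the parse-side effect, assuming the top-level parse_one API (illustrative only, not part of the commit):

    import sqlglot
    from sqlglot import exp

    # Both spellings of the "all matches" function should land on the same
    # expression type after this change.
    single = sqlglot.parse_one("REGEXP_SUBSTR(col, 'a.')", read="snowflake")
    every = sqlglot.parse_one("REGEXP_SUBSTR_ALL(col, 'a.')", read="snowflake")
    assert isinstance(single, exp.RegexpExtract)
    assert isinstance(every, exp.RegexpExtractAll)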
@@ -770,6 +823,7 @@ class Snowflake(Dialect):
        SUPPORTS_CONVERT_TIMEZONE = True
        EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
        SUPPORTS_MEDIAN = True
        ARRAY_SIZE_NAME = "ARRAY_SIZE"

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
@@ -802,11 +856,13 @@ class Snowflake(Dialect):
            ),
            exp.GroupConcat: rename_func("LISTAGG"),
            exp.If: if_sql(name="IFF", false_value="NULL"),
            exp.JSONExtractArray: _json_extract_value_array_sql,
            exp.JSONExtractScalar: lambda self, e: self.func(
                "JSON_EXTRACT_PATH_TEXT", e.this, e.expression
            ),
            exp.JSONObject: lambda self, e: self.func("OBJECT_CONSTRUCT_KEEP_NULL", *e.expressions),
            exp.JSONPathRoot: lambda *_: "",
            exp.JSONValueArray: _json_extract_value_array_sql,
            exp.LogicalAnd: rename_func("BOOLAND_AGG"),
            exp.LogicalOr: rename_func("BOOLOR_AGG"),
            exp.Map: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
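The two new JSON entries route to _json_extract_value_array_sql, which rewrites the extraction as a TRANSFORM over a per-element lambda. A rough transpilation sketch, assuming BigQuery's JSON_EXTRACT_ARRAY parses to exp.JSONExtractArray in this version (illustrative only; the exact output may vary):

    import sqlglot

    # Read a JSON array extraction from BigQuery and emit Snowflake SQL, which
    # should go through the TRANSFORM-with-lambda rewrite added above.
    print(sqlglot.transpile(
        "SELECT JSON_EXTRACT_ARRAY(payload, '$.items')",
        read="bigquery",
        write="snowflake",
    )[0])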
@@ -823,6 +879,8 @@ class Snowflake(Dialect):
                [transforms.add_within_group_for_percentiles]
            ),
            exp.Pivot: transforms.preprocess([_unqualify_unpivot_columns]),
            exp.RegexpExtract: _regexpextract_sql,
            exp.RegexpExtractAll: _regexpextract_sql,
            exp.RegexpILike: _regexpilike_sql,
            exp.Rand: rename_func("RANDOM"),
            exp.Select: transforms.preprocess(
@@ -867,6 +925,9 @@ class Snowflake(Dialect):
            exp.VarMap: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
            exp.WeekOfYear: rename_func("WEEKOFYEAR"),
            exp.Xor: rename_func("BOOLXOR"),
            exp.Levenshtein: unsupported_args("ins_cost", "del_cost", "sub_cost")(
                rename_func("EDITDISTANCE")
            ),
        }

        SUPPORTED_JSON_PATH_PARTS = {
@@ -1009,30 +1070,6 @@ class Snowflake(Dialect):

            return f"SHOW {terse}{expression.name}{history}{like}{scope_kind}{scope}{starts_with}{limit}{from_}"

        def regexpextract_sql(self, expression: exp.RegexpExtract) -> str:
            # Other dialects don't support all of the following parameters, so we need to
            # generate default values as necessary to ensure the transpilation is correct
            group = expression.args.get("group")

            # To avoid generating all these default values, we set group to None if
            # it's 0 (also default value) which doesn't trigger the following chain
            if group and group.name == "0":
                group = None

            parameters = expression.args.get("parameters") or (group and exp.Literal.string("c"))
            occurrence = expression.args.get("occurrence") or (parameters and exp.Literal.number(1))
            position = expression.args.get("position") or (occurrence and exp.Literal.number(1))

            return self.func(
                "REGEXP_SUBSTR",
                expression.this,
                expression.expression,
                position,
                occurrence,
                parameters,
                group,
            )

        def describe_sql(self, expression: exp.Describe) -> str:
            # Default to table if kind is unknown
            kind_value = expression.args.get("kind") or "TABLE"