Merging upstream version 25.16.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
commit bad79d1f7c
parent 7688e2bdf8
110 changed files with 75353 additions and 68092 deletions
@@ -92,6 +92,7 @@ class Generator(metaclass=_Generator):
        exp.DateFormatColumnConstraint: lambda self, e: f"FORMAT {self.sql(e, 'this')}",
        exp.DefaultColumnConstraint: lambda self, e: f"DEFAULT {self.sql(e, 'this')}",
        exp.DynamicProperty: lambda *_: "DYNAMIC",
        exp.EmptyProperty: lambda *_: "EMPTY",
        exp.EncodeColumnConstraint: lambda self, e: f"ENCODE {self.sql(e, 'this')}",
        exp.EphemeralColumnConstraint: lambda self,
        e: f"EPHEMERAL{(' ' + self.sql(e, 'this')) if e.this else ''}",
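These TRANSFORMS entries are the per-node rendering hooks the generator consults before falling back to the `<node>_sql` methods. A minimal sketch of the override pattern, using a hypothetical custom dialect (the class name and the chosen override are illustrative, not part of this change):

```python
from sqlglot import exp, generator
from sqlglot.dialects.dialect import Dialect


class MyDialect(Dialect):
    """Hypothetical dialect: override a single TRANSFORMS entry, keep the rest."""

    class Generator(generator.Generator):
        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            # Render the constraint as a bare keyword, ignoring any stored expression.
            exp.EphemeralColumnConstraint: lambda self, e: "EPHEMERAL",
        }
```

Dialect subclasses register themselves under their lowercased class name, so `sqlglot.transpile(sql, write="mydialect")` would route generation through this class.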
@@ -117,8 +118,10 @@ class Generator(metaclass=_Generator):
        e: f"ON COMMIT {'DELETE' if e.args.get('delete') else 'PRESERVE'} ROWS",
        exp.OnProperty: lambda self, e: f"ON {self.sql(e, 'this')}",
        exp.OnUpdateColumnConstraint: lambda self, e: f"ON UPDATE {self.sql(e, 'this')}",
        exp.Operator: lambda self, e: self.binary(e, ""),  # The operator is produced in `binary`
        exp.OutputModelProperty: lambda self, e: f"OUTPUT{self.sql(e, 'this')}",
        exp.PathColumnConstraint: lambda self, e: f"PATH {self.sql(e, 'this')}",
        exp.PivotAny: lambda self, e: f"ANY{self.sql(e, 'this')}",
        exp.ProjectionPolicyColumnConstraint: lambda self,
        e: f"PROJECTION POLICY {self.sql(e, 'this')}",
        exp.RemoteWithConnectionModelProperty: lambda self,
@@ -136,6 +139,8 @@ class Generator(metaclass=_Generator):
        exp.SqlSecurityProperty: lambda _,
        e: f"SQL SECURITY {'DEFINER' if e.args.get('definer') else 'INVOKER'}",
        exp.StabilityProperty: lambda _, e: e.name,
        exp.Stream: lambda self, e: f"STREAM {self.sql(e, 'this')}",
        exp.StreamingTableProperty: lambda *_: "STREAMING",
        exp.StrictProperty: lambda *_: "STRICT",
        exp.TemporaryProperty: lambda *_: "TEMPORARY",
        exp.TagColumnConstraint: lambda self, e: f"TAG ({self.expressions(e, flat=True)})",
@@ -371,6 +376,18 @@ class Generator(metaclass=_Generator):
    # Whether the text pattern/fill (3rd) parameter of RPAD()/LPAD() is optional (defaults to space)
    PAD_FILL_PATTERN_IS_REQUIRED = False

    # Whether a projection can explode into multiple rows, e.g. by unnesting an array.
    SUPPORTS_EXPLODING_PROJECTIONS = True

    # Whether ARRAY_CONCAT can be generated with varlen args or if it should be reduced to 2-arg version
    ARRAY_CONCAT_IS_VAR_LEN = True

    # Whether CONVERT_TIMEZONE() is supported; if not, it will be generated as exp.AtTimeZone
    SUPPORTS_CONVERT_TIMEZONE = False

    # Whether nullable types can be constructed, e.g. `Nullable(Int64)`
    SUPPORTS_NULLABLE_TYPES = True

    # The name to generate for the JSONPath expression. If `None`, only `this` will be generated
    PARSE_JSON_NAME: t.Optional[str] = "PARSE_JSON"
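These new class-level flags let each dialect generator declare its capabilities; the shared methods added further down in this diff (arrayconcat_sql, converttimezone_sql, datatype_sql, explodinggenerateseries_sql) branch on them. A hedged sketch of a dialect opting out of the defaults; the dialect itself is hypothetical:

```python
from sqlglot import generator
from sqlglot.dialects.dialect import Dialect


class NarrowDialect(Dialect):
    """Hypothetical target with a 2-arg ARRAY_CONCAT and a native CONVERT_TIMEZONE."""

    class Generator(generator.Generator):
        # Reduce ARRAY_CONCAT(a, b, c) to nested 2-arg calls on generation.
        ARRAY_CONCAT_IS_VAR_LEN = False
        # Emit CONVERT_TIMEZONE(...) directly instead of rewriting to AT TIME ZONE.
        SUPPORTS_CONVERT_TIMEZONE = True
        # Strip Nullable(...) wrappers down to the inner type.
        SUPPORTS_NULLABLE_TYPES = False
```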
@@ -439,6 +456,7 @@ class Generator(metaclass=_Generator):
        exp.DynamicProperty: exp.Properties.Location.POST_CREATE,
        exp.DistKeyProperty: exp.Properties.Location.POST_SCHEMA,
        exp.DistStyleProperty: exp.Properties.Location.POST_SCHEMA,
        exp.EmptyProperty: exp.Properties.Location.POST_SCHEMA,
        exp.EngineProperty: exp.Properties.Location.POST_SCHEMA,
        exp.ExecuteAsProperty: exp.Properties.Location.POST_SCHEMA,
        exp.ExternalProperty: exp.Properties.Location.POST_CREATE,
@@ -488,6 +506,7 @@ class Generator(metaclass=_Generator):
        exp.SqlReadWriteProperty: exp.Properties.Location.POST_SCHEMA,
        exp.SqlSecurityProperty: exp.Properties.Location.POST_CREATE,
        exp.StabilityProperty: exp.Properties.Location.POST_SCHEMA,
        exp.StreamingTableProperty: exp.Properties.Location.POST_CREATE,
        exp.StrictProperty: exp.Properties.Location.POST_SCHEMA,
        exp.TemporaryProperty: exp.Properties.Location.POST_CREATE,
        exp.ToTableProperty: exp.Properties.Location.POST_SCHEMA,
@@ -962,6 +981,7 @@ class Generator(metaclass=_Generator):

    def create_sql(self, expression: exp.Create) -> str:
        kind = self.sql(expression, "kind")
        kind = self.dialect.INVERSE_CREATABLE_KIND_MAPPING.get(kind) or kind
        properties = expression.args.get("properties")
        properties_locs = self.locate_properties(properties) if properties else defaultdict()
@@ -1018,6 +1038,7 @@ class Generator(metaclass=_Generator):
        index_sql = indexes + postindex_props_sql

        replace = " OR REPLACE" if expression.args.get("replace") else ""
        refresh = " OR REFRESH" if expression.args.get("refresh") else ""
        unique = " UNIQUE" if expression.args.get("unique") else ""

        clustered = expression.args.get("clustered")
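The new `refresh` arg threads a Databricks-style `CREATE OR REFRESH` modifier through create_sql. A rough sketch of exercising it by setting the arg on the AST directly; the printed shape is indicative only:

```python
from sqlglot import exp

# Build CREATE TABLE t AS SELECT * FROM s, then flag it for OR REFRESH.
create = exp.Create(
    this=exp.to_table("t"),
    kind="TABLE",
    expression=exp.select("*").from_("s"),
    refresh=True,
)
print(create.sql())  # expected to include "CREATE OR REFRESH TABLE t AS ..."
```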
@@ -1037,7 +1058,7 @@ class Generator(metaclass=_Generator):
                wrapped=False,
            )

        modifiers = "".join((clustered_sql, replace, unique, postcreate_props_sql))
        modifiers = "".join((clustered_sql, replace, refresh, unique, postcreate_props_sql))

        postexpression_props_sql = ""
        if properties_locs.get(exp.Properties.Location.POST_EXPRESSION):
@@ -1096,7 +1117,9 @@ class Generator(metaclass=_Generator):
    def describe_sql(self, expression: exp.Describe) -> str:
        style = expression.args.get("style")
        style = f" {style}" if style else ""
        return f"DESCRIBE{style} {self.sql(expression, 'this')}"
        partition = self.sql(expression, "partition")
        partition = f" {partition}" if partition else ""
        return f"DESCRIBE{style} {self.sql(expression, 'this')}{partition}"

    def heredoc_sql(self, expression: exp.Heredoc) -> str:
        tag = self.sql(expression, "tag")
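describe_sql now appends an optional partition clause (Hive/Spark `DESCRIBE tbl PARTITION (...)`). A hedged sketch that builds the node by hand, since the reader-side syntax depends on the dialect:

```python
from sqlglot import exp

desc = exp.Describe(
    this=exp.to_table("t"),
    partition=exp.Partition(
        expressions=[exp.EQ(this=exp.column("ds"), expression=exp.Literal.string("2024-01-01"))]
    ),
)
print(desc.sql(dialect="hive"))  # roughly: DESCRIBE t PARTITION(ds = '2024-01-01')
```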
@@ -1195,20 +1218,21 @@ class Generator(metaclass=_Generator):
        return f"{this}{specifier}"

    def datatype_sql(self, expression: exp.DataType) -> str:
        type_value = expression.this
        nested = ""
        values = ""
        interior = self.expressions(expression, flat=True)

        type_value = expression.this
        if type_value == exp.DataType.Type.USERDEFINED and expression.args.get("kind"):
            type_sql = self.sql(expression, "kind")
        else:
        elif type_value != exp.DataType.Type.NULLABLE or self.SUPPORTS_NULLABLE_TYPES:
            type_sql = (
                self.TYPE_MAPPING.get(type_value, type_value.value)
                if isinstance(type_value, exp.DataType.Type)
                else type_value
            )

        nested = ""
        interior = self.expressions(expression, flat=True)
        values = ""
        else:
            return interior

        if interior:
            if expression.args.get("nested"):
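The new NULLABLE branch means a generator that sets SUPPORTS_NULLABLE_TYPES = False silently unwraps `Nullable(T)` to `T` (the `return interior` path). A sketch with a hypothetical dialect to illustrate; only the ClickHouse type string is real syntax:

```python
from sqlglot import exp, generator
from sqlglot.dialects.dialect import Dialect


class NoNullable(Dialect):
    class Generator(generator.Generator):
        SUPPORTS_NULLABLE_TYPES = False  # hypothetical target without Nullable(...)


dtype = exp.DataType.build("Nullable(Int64)", dialect="clickhouse")
print(dtype.sql(dialect="clickhouse"))  # Nullable(Int64)
print(dtype.sql(dialect="nonullable"))  # expected: just the inner type, e.g. BIGINT
```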
@@ -1258,6 +1282,7 @@ class Generator(metaclass=_Generator):
        expressions = self.expressions(expression, flat=True)
        expressions = f" ({expressions})" if expressions else ""
        kind = expression.args["kind"]
        kind = self.dialect.INVERSE_CREATABLE_KIND_MAPPING.get(kind) or kind
        exists_sql = " IF EXISTS " if expression.args.get("exists") else " "
        on_cluster = self.sql(expression, "cluster")
        on_cluster = f" {on_cluster}" if on_cluster else ""
@@ -1277,7 +1302,7 @@ class Generator(metaclass=_Generator):
    def fetch_sql(self, expression: exp.Fetch) -> str:
        direction = expression.args.get("direction")
        direction = f" {direction}" if direction else ""
        count = expression.args.get("count")
        count = self.sql(expression, "count")
        count = f" {count}" if count else ""
        if expression.args.get("percent"):
            count = f"{count} PERCENT"
@@ -1639,7 +1664,12 @@ class Generator(metaclass=_Generator):
        else:
            expression_sql = f"{returning}{expression_sql}{on_conflict}"

        sql = f"INSERT{hint}{alternative}{ignore}{this}{stored}{by_name}{exists}{where}{expression_sql}"
        partition_by = self.sql(expression, "partition")
        partition_by = f" {partition_by}" if partition_by else ""
        settings = self.sql(expression, "settings")
        settings = f" {settings}" if settings else ""

        sql = f"INSERT{hint}{alternative}{ignore}{this}{stored}{by_name}{exists}{partition_by}{settings}{where}{expression_sql}"
        return self.prepend_ctes(expression, sql)

    def intersect_sql(self, expression: exp.Intersect) -> str:
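insert_sql now splices optional `partition` and `settings` args into the statement (Hive-style `INSERT ... PARTITION (...)` and ClickHouse `INSERT ... SETTINGS ...`). A hedged sketch that sets the partition arg on a built Insert; the printed shape is indicative:

```python
from sqlglot import exp

ins = exp.insert("SELECT * FROM staging", "events")
ins.set(
    "partition",
    exp.Partition(
        expressions=[exp.EQ(this=exp.column("ds"), expression=exp.Literal.string("2024-01-01"))]
    ),
)
# Roughly: INSERT INTO events PARTITION(ds = '2024-01-01') SELECT * FROM staging
print(ins.sql(dialect="hive"))
```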
@@ -1824,13 +1854,20 @@ class Generator(metaclass=_Generator):
        alias = self.sql(expression, "alias")
        alias = f" AS {alias}" if alias else ""
        direction = self.seg("UNPIVOT" if expression.unpivot else "PIVOT")

        field = self.sql(expression, "field")
        if field and isinstance(expression.args.get("field"), exp.PivotAny):
            field = f"IN ({field})"

        include_nulls = expression.args.get("include_nulls")
        if include_nulls is not None:
            nulls = " INCLUDE NULLS " if include_nulls else " EXCLUDE NULLS "
        else:
            nulls = ""
        return f"{direction}{nulls}({expressions} FOR {field}){alias}"

        default_on_null = self.sql(expression, "default_on_null")
        default_on_null = f" DEFAULT ON NULL ({default_on_null})" if default_on_null else ""
        return f"{direction}{nulls}({expressions} FOR {field}{default_on_null}){alias}"

    def version_sql(self, expression: exp.Version) -> str:
        this = f"FOR {expression.name}"
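pivot_sql picks up two Snowflake-isms here: `FOR col IN (ANY ...)` via exp.PivotAny and an optional `DEFAULT ON NULL (...)` clause. A hedged round-trip sketch, assuming the Snowflake reader in this release parses the syntax:

```python
import sqlglot

sql = """
SELECT *
FROM quarterly_sales PIVOT(
  SUM(amount) FOR quarter IN (ANY ORDER BY quarter) DEFAULT ON NULL (0)
)
"""
print(sqlglot.transpile(sql, read="snowflake", write="snowflake")[0])
```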
@@ -2148,15 +2185,7 @@ class Generator(metaclass=_Generator):
        this = self.sql(expression, "this")
        this = f"{this} " if this else this
        siblings = "SIBLINGS " if expression.args.get("siblings") else ""
        order = self.op_expressions(f"{this}ORDER {siblings}BY", expression, flat=this or flat)  # type: ignore
        interpolated_values = [
            f"{self.sql(named_expression, 'alias')} AS {self.sql(named_expression, 'this')}"
            for named_expression in expression.args.get("interpolate") or []
        ]
        interpolate = (
            f" INTERPOLATE ({', '.join(interpolated_values)})" if interpolated_values else ""
        )
        return f"{order}{interpolate}"
        return self.op_expressions(f"{this}ORDER {siblings}BY", expression, flat=this or flat)  # type: ignore

    def withfill_sql(self, expression: exp.WithFill) -> str:
        from_sql = self.sql(expression, "from")
@@ -2165,7 +2194,14 @@ class Generator(metaclass=_Generator):
        to_sql = f" TO {to_sql}" if to_sql else ""
        step_sql = self.sql(expression, "step")
        step_sql = f" STEP {step_sql}" if step_sql else ""
        return f"WITH FILL{from_sql}{to_sql}{step_sql}"
        interpolated_values = [
            f"{self.sql(named_expression, 'alias')} AS {self.sql(named_expression, 'this')}"
            for named_expression in expression.args.get("interpolate") or []
        ]
        interpolate = (
            f" INTERPOLATE ({', '.join(interpolated_values)})" if interpolated_values else ""
        )
        return f"WITH FILL{from_sql}{to_sql}{step_sql}{interpolate}"

    def cluster_sql(self, expression: exp.Cluster) -> str:
        return self.op_expressions("CLUSTER BY", expression)
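The INTERPOLATE clause moves from order_sql into withfill_sql, so it now renders as part of ClickHouse's `WITH FILL` modifier rather than after the whole ORDER BY. A hedged round-trip sketch (ClickHouse syntax; exact output formatting may differ):

```python
import sqlglot

sql = "SELECT d, x FROM t ORDER BY d WITH FILL FROM 1 TO 10 STEP 1 INTERPOLATE (x AS x + 1)"
print(sqlglot.transpile(sql, read="clickhouse", write="clickhouse")[0])
```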
@@ -2875,11 +2911,13 @@ class Generator(metaclass=_Generator):

    def pivotalias_sql(self, expression: exp.PivotAlias) -> str:
        alias = expression.args["alias"]

        identifier_alias = isinstance(alias, exp.Identifier)
        literal_alias = isinstance(alias, exp.Literal)

        if identifier_alias and not self.UNPIVOT_ALIASES_ARE_IDENTIFIERS:
            alias.replace(exp.Literal.string(alias.output_name))
        elif not identifier_alias and self.UNPIVOT_ALIASES_ARE_IDENTIFIERS:
        elif not identifier_alias and literal_alias and self.UNPIVOT_ALIASES_ARE_IDENTIFIERS:
            alias.replace(exp.to_identifier(alias.output_name))

        return self.alias_sql(expression)
@@ -3103,7 +3141,7 @@ class Generator(metaclass=_Generator):
        exprs = self.expressions(expression, flat=True)
        return f"SET {exprs}"

    def altertable_sql(self, expression: exp.AlterTable) -> str:
    def alter_sql(self, expression: exp.Alter) -> str:
        actions = expression.args["actions"]

        if isinstance(actions[0], exp.ColumnDef):
@@ -3112,6 +3150,8 @@ class Generator(metaclass=_Generator):
            actions = self.expressions(expression, key="actions", prefix="ADD COLUMNS ")
        elif isinstance(actions[0], exp.Delete):
            actions = self.expressions(expression, key="actions", flat=True)
        elif isinstance(actions[0], exp.Query):
            actions = "AS " + self.expressions(expression, key="actions")
        else:
            actions = self.expressions(expression, key="actions", flat=True)
@@ -3121,9 +3161,10 @@ class Generator(metaclass=_Generator):
        only = " ONLY" if expression.args.get("only") else ""
        options = self.expressions(expression, key="options")
        options = f", {options}" if options else ""
        return f"ALTER TABLE{exists}{only} {self.sql(expression, 'this')}{on_cluster} {actions}{options}"
        kind = self.sql(expression, "kind")
        return f"ALTER {kind}{exists}{only} {self.sql(expression, 'this')}{on_cluster} {actions}{options}"

    def add_column_sql(self, expression: exp.AlterTable) -> str:
    def add_column_sql(self, expression: exp.Alter) -> str:
        if self.ALTER_TABLE_INCLUDE_COLUMN_KEYWORD:
            return self.expressions(
                expression,
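exp.AlterTable is renamed to exp.Alter with a `kind` arg, and alter_sql/add_column_sql are updated to match, so downstream code that matches on the old class name needs the same rename. A quick check sketch:

```python
from sqlglot import exp, parse_one

node = parse_one("ALTER TABLE t ADD COLUMN c INT")
assert isinstance(node, exp.Alter)
print(node.args.get("kind"))  # expected: "TABLE"
```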
@@ -3312,8 +3353,25 @@ class Generator(metaclass=_Generator):
        return f"USE{kind}{this}"

    def binary(self, expression: exp.Binary, op: str) -> str:
        op = self.maybe_comment(op, comments=expression.comments)
        return f"{self.sql(expression, 'this')} {op} {self.sql(expression, 'expression')}"
        sqls: t.List[str] = []
        stack: t.List[t.Union[str, exp.Expression]] = [expression]
        binary_type = type(expression)

        while stack:
            node = stack.pop()

            if type(node) is binary_type:
                op_func = node.args.get("operator")
                if op_func:
                    op = f"OPERATOR({self.sql(op_func)})"

                stack.append(node.right)
                stack.append(f" {self.maybe_comment(op, comments=node.comments)} ")
                stack.append(node.left)
            else:
                sqls.append(self.sql(node))

        return "".join(sqls)

    def function_fallback_sql(self, expression: exp.Func) -> str:
        args = []
|
|||
table = "" if isinstance(expression.this, exp.Literal) else "TABLE "
|
||||
return f"REFRESH {table}{this}"
|
||||
|
||||
def operator_sql(self, expression: exp.Operator) -> str:
|
||||
return self.binary(expression, f"OPERATOR({self.sql(expression, 'operator')})")
|
||||
|
||||
def toarray_sql(self, expression: exp.ToArray) -> str:
|
||||
arg = expression.this
|
||||
if not arg.type:
|
||||
|
@@ -4041,3 +4096,44 @@ class Generator(metaclass=_Generator):
    def summarize_sql(self, expression: exp.Summarize) -> str:
        table = " TABLE" if expression.args.get("table") else ""
        return f"SUMMARIZE{table} {self.sql(expression.this)}"

    def explodinggenerateseries_sql(self, expression: exp.ExplodingGenerateSeries) -> str:
        generate_series = exp.GenerateSeries(**expression.args)

        parent = expression.parent
        if isinstance(parent, (exp.Alias, exp.TableAlias)):
            parent = parent.parent

        if self.SUPPORTS_EXPLODING_PROJECTIONS and not isinstance(parent, (exp.Table, exp.Unnest)):
            return self.sql(exp.Unnest(expressions=[generate_series]))

        if isinstance(parent, exp.Select):
            self.unsupported("GenerateSeries projection unnesting is not supported.")

        return self.sql(generate_series)

    def arrayconcat_sql(self, expression: exp.ArrayConcat, name: str = "ARRAY_CONCAT") -> str:
        exprs = expression.expressions
        if not self.ARRAY_CONCAT_IS_VAR_LEN:
            rhs = reduce(lambda x, y: exp.ArrayConcat(this=x, expressions=[y]), exprs)
        else:
            rhs = self.expressions(expression)

        return self.func(name, expression.this, rhs)

    def converttimezone_sql(self, expression: exp.ConvertTimezone) -> str:
        if self.SUPPORTS_CONVERT_TIMEZONE:
            return self.function_fallback_sql(expression)

        source_tz = expression.args.get("source_tz")
        target_tz = expression.args.get("target_tz")
        timestamp = expression.args.get("timestamp")

        if source_tz and timestamp:
            timestamp = exp.AtTimeZone(
                this=exp.cast(timestamp, exp.DataType.Type.TIMESTAMPNTZ), zone=source_tz
            )

        expr = exp.AtTimeZone(this=timestamp, zone=target_tz)

        return self.sql(expr)
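converttimezone_sql falls back to nested AT TIME ZONE (with a TIMESTAMPNTZ cast on the source side) whenever SUPPORTS_CONVERT_TIMEZONE is left at its False default. A hedged sketch building the node directly, since reader-side support for CONVERT_TIMEZONE varies by dialect:

```python
from sqlglot import exp

node = exp.ConvertTimezone(
    source_tz=exp.Literal.string("UTC"),
    target_tz=exp.Literal.string("America/New_York"),
    timestamp=exp.column("ts"),
)
# With the default generator this should render via AT TIME ZONE, e.g.
# CAST(ts AS TIMESTAMPNTZ) AT TIME ZONE 'UTC' AT TIME ZONE 'America/New_York'
print(node.sql())
```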