Merging upstream version 18.7.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent 77523b6777
commit d1b976f442
96 changed files with 59037 additions and 52828 deletions
@@ -125,6 +125,12 @@ class _Dialect(type):
         if not klass.STRICT_STRING_CONCAT and klass.DPIPE_IS_STRING_CONCAT:
             klass.parser_class.BITWISE[TokenType.DPIPE] = exp.SafeDPipe
 
+        if not klass.SUPPORTS_SEMI_ANTI_JOIN:
+            klass.parser_class.TABLE_ALIAS_TOKENS = klass.parser_class.TABLE_ALIAS_TOKENS | {
+                TokenType.ANTI,
+                TokenType.SEMI,
+            }
+
         klass.generator_class.can_identify = klass.can_identify
 
         return klass
@@ -156,9 +162,15 @@ class Dialect(metaclass=_Dialect):
     # Determines whether or not user-defined data types are supported
     SUPPORTS_USER_DEFINED_TYPES = True
 
+    # Determines whether or not SEMI/ANTI JOINs are supported
+    SUPPORTS_SEMI_ANTI_JOIN = True
+
     # Determines how function names are going to be normalized
     NORMALIZE_FUNCTIONS: bool | str = "upper"
 
+    # Determines whether the base comes first in the LOG function
+    LOG_BASE_FIRST = True
+
     # Indicates the default null ordering method to use if not explicitly set
     # Options are: "nulls_are_small", "nulls_are_large", "nulls_are_last"
     NULL_ORDERING = "nulls_are_small"
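Note (added for illustration, not part of the upstream diff): the new SUPPORTS_SEMI_ANTI_JOIN flag works together with the metaclass change in the first hunk, which adds ANTI and SEMI to the parser's TABLE_ALIAS_TOKENS whenever the flag is off. A minimal sketch of a downstream dialect opting out; the class names are hypothetical and the nested Parser subclass is only there to keep the token tweak scoped to this dialect.

```python
# Minimal sketch, assuming sqlglot's usual custom-dialect pattern; the class
# names below are hypothetical and not taken from the diff.
from sqlglot import parser
from sqlglot.dialects.dialect import Dialect


class NoSemiAntiDialect(Dialect):
    # With the flag off, the _Dialect metaclass (first hunk) extends this
    # dialect's TABLE_ALIAS_TOKENS with ANTI and SEMI, so e.g.
    # "SELECT * FROM t anti" can treat "anti" as a plain table alias.
    SUPPORTS_SEMI_ANTI_JOIN = False

    # Also flips the new LOG argument-order flag, purely as an example.
    LOG_BASE_FIRST = False

    # A dedicated Parser subclass keeps the metaclass's token tweak on this
    # dialect instead of mutating the shared base parser.
    class Parser(parser.Parser):
        pass
```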
@@ -331,10 +343,18 @@ def approx_count_distinct_sql(self: Generator, expression: exp.ApproxDistinct) -
     return self.func("APPROX_COUNT_DISTINCT", expression.this)
 
 
-def if_sql(self: Generator, expression: exp.If) -> str:
-    return self.func(
-        "IF", expression.this, expression.args.get("true"), expression.args.get("false")
-    )
+def if_sql(
+    name: str = "IF", false_value: t.Optional[exp.Expression | str] = None
+) -> t.Callable[[Generator, exp.If], str]:
+    def _if_sql(self: Generator, expression: exp.If) -> str:
+        return self.func(
+            name,
+            expression.this,
+            expression.args.get("true"),
+            expression.args.get("false") or false_value,
+        )
+
+    return _if_sql
 
 
 def arrow_json_extract_sql(self: Generator, expression: exp.JSONExtract | exp.JSONBExtract) -> str:
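Note (illustration only): if_sql is now a factory that returns the generator callback, so the SQL function name and a default false branch can be customized per dialect instead of being hard-coded to IF. A sketch of how a generator might register it through sqlglot's TRANSFORMS hook; the IIF name and the NULL fallback are example parameters, not taken from this diff.

```python
# Sketch: "IIF" and the NULL fallback are illustrative parameter choices.
from sqlglot import exp, generator
from sqlglot.dialects.dialect import Dialect, if_sql


class ExampleDialect(Dialect):
    class Generator(generator.Generator):
        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            # exp.If renders as IIF(cond, true, false); when the false branch
            # is absent, the provided NULL expression is emitted instead.
            exp.If: if_sql("IIF", exp.null()),
        }
```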
@@ -751,6 +771,12 @@ def any_value_to_max_sql(self: Generator, expression: exp.AnyValue) -> str:
     return self.func("MAX", expression.this)
 
 
+def bool_xor_sql(self: Generator, expression: exp.Xor) -> str:
+    a = self.sql(expression.left)
+    b = self.sql(expression.right)
+    return f"({a} AND (NOT {b})) OR ((NOT {a}) AND {b})"
+
+
 # Used to generate JSON_OBJECT with a comma in BigQuery and MySQL instead of colon
 def json_keyvalue_comma_sql(self: Generator, expression: exp.JSONKeyValue) -> str:
     return f"{self.sql(expression, 'this')}, {self.sql(expression, 'expression')}"
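Note (illustration only): bool_xor_sql lowers a logical XOR into AND/OR/NOT for engines that lack a boolean XOR operator; which built-in dialects register it is not visible in this hunk. A sketch with a made-up dialect:

```python
# Sketch: NoXorDialect is hypothetical; built-in usage isn't shown in the diff.
from sqlglot import exp, generator
from sqlglot.dialects.dialect import Dialect, bool_xor_sql


class NoXorDialect(Dialect):
    class Generator(generator.Generator):
        TRANSFORMS = {**generator.Generator.TRANSFORMS, exp.Xor: bool_xor_sql}


# Build an XOR node directly and generate it with the custom dialect:
xor = exp.Xor(this=exp.column("a"), expression=exp.column("b"))
print(NoXorDialect().generate(xor))  # (a AND (NOT b)) OR ((NOT a) AND b)
```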
@@ -764,3 +790,10 @@ def is_parse_json(expression: exp.Expression) -> bool:
 
 def isnull_to_is_null(args: t.List) -> exp.Expression:
     return exp.Paren(this=exp.Is(this=seq_get(args, 0), expression=exp.null()))
+
+
+def move_insert_cte_sql(self: Generator, expression: exp.Insert) -> str:
+    if expression.expression.args.get("with"):
+        expression = expression.copy()
+        expression.set("with", expression.expression.args["with"].pop())
+    return self.insert_sql(expression)
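Note (illustration only): move_insert_cte_sql detaches a WITH clause from the inner query of an INSERT and re-attaches it to the INSERT itself before generating, for engines that expect the CTE at the statement level. Which built-in dialects use it is not shown here; a sketch with a made-up dialect and an approximate expected output:

```python
# Sketch: CteOnInsertDialect is hypothetical; the expected-output comment is
# an approximation, not taken from the diff.
import sqlglot
from sqlglot import exp, generator
from sqlglot.dialects.dialect import Dialect, move_insert_cte_sql


class CteOnInsertDialect(Dialect):
    class Generator(generator.Generator):
        TRANSFORMS = {**generator.Generator.TRANSFORMS, exp.Insert: move_insert_cte_sql}


sql = "INSERT INTO t WITH cte AS (SELECT 1 AS x) SELECT x FROM cte"
# Expected to come out roughly as:
#   WITH cte AS (SELECT 1 AS x) INSERT INTO t SELECT x FROM cte
print(sqlglot.transpile(sql, write="cteoninsertdialect")[0])
```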