Merging upstream version 20.1.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent d4fe7bdb16
commit 90988d8258
127 changed files with 73384 additions and 73067 deletions

sqlglot/dialects/mysql.py

@@ -5,6 +5,7 @@ import typing as t
 from sqlglot import exp, generator, parser, tokens, transforms
 from sqlglot.dialects.dialect import (
     Dialect,
+    NormalizationStrategy,
     arrow_json_extract_scalar_sql,
     date_add_interval_sql,
     datestrtodate_sql,
@@ -150,10 +151,18 @@ class MySQL(Dialect):
     # https://dev.mysql.com/doc/refman/8.0/en/identifiers.html
     IDENTIFIERS_CAN_START_WITH_DIGIT = True
+
+    # We default to treating all identifiers as case-sensitive, since it matches MySQL's
+    # behavior on Linux systems. For MacOS and Windows systems, one can override this
+    # setting by specifying `dialect="mysql, normalization_strategy = lowercase"`.
+    #
+    # See also https://dev.mysql.com/doc/refman/8.2/en/identifier-case-sensitivity.html
+    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_SENSITIVE
+
     TIME_FORMAT = "'%Y-%m-%d %T'"
     DPIPE_IS_STRING_CONCAT = False
     SUPPORTS_USER_DEFINED_TYPES = False
     SUPPORTS_SEMI_ANTI_JOIN = False
     SAFE_DIVISION = True
 
     # https://prestodb.io/docs/current/functions/datetime.html#mysql-date-functions
     TIME_MAPPING = {
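
The comment block added above documents a configuration how-to: the default CASE_SENSITIVE strategy can be overridden through a dialect-settings string. A minimal sketch of how that override might be exercised, assuming sqlglot's public parse_one and normalize_identifiers helpers; identifier names are placeholders and the printed results are expectations, not verified output:

# A minimal sketch, assuming sqlglot's public API; printed results are expectations.
from sqlglot import parse_one
from sqlglot.optimizer.normalize_identifiers import normalize_identifiers

expr = parse_one("SELECT MyCol FROM MyTable", read="mysql")

# Default MySQL behavior: identifiers are case-sensitive, so normalization
# is expected to leave their casing untouched.
print(normalize_identifiers(expr.copy(), dialect="mysql").sql(dialect="mysql"))

# Overriding the strategy via the dialect-settings string from the comment above
# is expected to lowercase the unquoted identifiers instead.
print(
    normalize_identifiers(
        expr.copy(), dialect="mysql, normalization_strategy = lowercase"
    ).sql(dialect="mysql")
)
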
@@ -264,11 +273,6 @@
             TokenType.DPIPE: exp.Or,
         }
 
-        # MySQL uses || as a synonym to the logical OR operator
-        # https://dev.mysql.com/doc/refman/8.0/en/logical-operators.html#operator_or
-        BITWISE = parser.Parser.BITWISE.copy()
-        BITWISE.pop(TokenType.DPIPE)
-
         TABLE_ALIAS_TOKENS = (
             parser.Parser.TABLE_ALIAS_TOKENS - parser.Parser.TABLE_INDEX_HINT_TOKENS
         )
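
Both the removed BITWISE workaround and the surviving TokenType.DPIPE: exp.Or mapping encode the behavior the removed comment describes: MySQL treats || as logical OR rather than string concatenation (see DPIPE_IS_STRING_CONCAT = False earlier in the diff). A small illustration, assuming sqlglot's transpile API; the table/column names are placeholders and the exact output strings are expectations:

# A small sketch, assuming sqlglot's transpile API; outputs are expectations.
import sqlglot

# Read as MySQL: || should parse as a logical OR (exp.Or), so a dialect that
# spells OR explicitly is expected to print something like "SELECT a OR b FROM t".
print(sqlglot.transpile("SELECT a || b FROM t", read="mysql", write="postgres")[0])

# Read as Postgres: || is string concatenation there, so generating MySQL is
# expected to avoid || (e.g. by emitting a CONCAT call) rather than turn it into OR.
print(sqlglot.transpile("SELECT a || b FROM t", read="postgres", write="mysql")[0])
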
@@ -451,7 +455,7 @@
             self, kind: t.Optional[str] = None
         ) -> exp.IndexColumnConstraint:
             if kind:
-                self._match_texts({"INDEX", "KEY"})
+                self._match_texts(("INDEX", "KEY"))
 
             this = self._parse_id_var(any_token=False)
             index_type = self._match(TokenType.USING) and self._advance_any() and self._prev.text
@@ -514,7 +518,7 @@
 
             log = self._parse_string() if self._match_text_seq("IN") else None
 
-            if this in {"BINLOG EVENTS", "RELAYLOG EVENTS"}:
+            if this in ("BINLOG EVENTS", "RELAYLOG EVENTS"):
                 position = self._parse_number() if self._match_text_seq("FROM") else None
                 db = None
             else:
@@ -671,6 +675,7 @@
             exp.Trim: _trim_sql,
             exp.TryCast: no_trycast_sql,
             exp.TsOrDsAdd: _date_add_sql("ADD"),
+            exp.TsOrDsDiff: lambda self, e: self.func("DATEDIFF", e.this, e.expression),
             exp.TsOrDsToDate: _ts_or_ds_to_date_sql,
             exp.Week: _remove_ts_or_ds_to_date(),
             exp.WeekOfYear: _remove_ts_or_ds_to_date(rename_func("WEEKOFYEAR")),
@@ -763,7 +768,7 @@
 
             target = self.sql(expression, "target")
             target = f" {target}" if target else ""
-            if expression.name in {"COLUMNS", "INDEX"}:
+            if expression.name in ("COLUMNS", "INDEX"):
                 target = f" FROM{target}"
             elif expression.name == "GRANTS":
                 target = f" FOR{target}"
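
The change above is only a membership-test syntax swap; the behavior stays the same: SHOW COLUMNS and SHOW INDEX render their target with FROM, while SHOW GRANTS renders it with FOR. A quick round-trip sketch, assuming these MySQL SHOW statements parse as shown; the table name is a placeholder and the output is expected to match the input:

# A round-trip sketch, assuming sqlglot parses these MySQL SHOW statements;
# each statement is expected to re-generate in the same shape.
from sqlglot import parse_one

for sql in (
    "SHOW COLUMNS FROM my_table",  # target rendered with FROM
    "SHOW INDEX FROM my_table",    # also a FROM target
):
    print(parse_one(sql, read="mysql").sql(dialect="mysql"))
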
@@ -796,6 +801,14 @@
 
             return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"
 
+        def altercolumn_sql(self, expression: exp.AlterColumn) -> str:
+            dtype = self.sql(expression, "dtype")
+            if not dtype:
+                return super().altercolumn_sql(expression)
+
+            this = self.sql(expression, "this")
+            return f"MODIFY COLUMN {this} {dtype}"
+
         def _prefixed_sql(self, prefix: str, expression: exp.Expression, arg: str) -> str:
             sql = self.sql(expression, arg)
             return f" {prefix} {sql}" if sql else ""
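
The new altercolumn_sql override rewrites column type changes into MySQL's MODIFY COLUMN form, falling back to the base generator when no dtype is present. A minimal sketch of the intended effect, assuming the standard SET DATA TYPE syntax parses into exp.AlterColumn with a dtype; table and column names are placeholders and the output is an expectation:

# A minimal sketch, assuming sqlglot's transpile API; the output is an expectation.
import sqlglot

# The standard "SET DATA TYPE" form should parse into exp.AlterColumn with a dtype.
sql = "ALTER TABLE t ALTER COLUMN c SET DATA TYPE BIGINT"
print(sqlglot.transpile(sql, write="mysql")[0])
# expected: roughly "ALTER TABLE t MODIFY COLUMN c BIGINT"

# When the AlterColumn carries no dtype (e.g. DROP NOT NULL), the override above
# defers to the base generator via super(), so no MODIFY COLUMN is emitted.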