2025-02-13 14:53:05 +01:00
|
|
|
from __future__ import annotations
|
|
|
|
|
|
|
|
import typing as t
|
|
|
|
|
|
|
|
from sqlglot import exp, generator, parser, tokens
|
2025-02-13 06:15:54 +01:00
|
|
|
from sqlglot.dialects.dialect import (
|
|
|
|
Dialect,
|
2025-02-13 14:54:32 +01:00
|
|
|
locate_to_strposition,
|
2025-02-13 06:15:54 +01:00
|
|
|
no_ilike_sql,
|
|
|
|
no_paren_current_date_sql,
|
|
|
|
no_tablesample_sql,
|
|
|
|
no_trycast_sql,
|
2025-02-13 15:05:06 +01:00
|
|
|
strposition_to_locate_sql,
|
2025-02-13 06:15:54 +01:00
|
|
|
)
|
2025-02-13 14:53:05 +01:00
|
|
|
from sqlglot.helper import seq_get
|
|
|
|
from sqlglot.tokens import TokenType
|
|
|
|
|
|
|
|
|
|
|
|
def _show_parser(*args, **kwargs):
|
|
|
|
def _parse(self):
|
|
|
|
return self._parse_show_mysql(*args, **kwargs)
|
|
|
|
|
|
|
|
return _parse
|
2025-02-13 06:15:54 +01:00
|
|
|
|
|
|
|
|
|
|
|
def _date_trunc_sql(self, expression):
|
2025-02-13 14:53:05 +01:00
|
|
|
unit = expression.name.lower()
|
2025-02-13 06:15:54 +01:00
|
|
|
|
2025-02-13 14:53:05 +01:00
|
|
|
expr = self.sql(expression.expression)
|
2025-02-13 06:15:54 +01:00
|
|
|
|
|
|
|
if unit == "day":
|
2025-02-13 14:53:05 +01:00
|
|
|
return f"DATE({expr})"
|
2025-02-13 06:15:54 +01:00
|
|
|
|
|
|
|
if unit == "week":
|
2025-02-13 14:53:05 +01:00
|
|
|
concat = f"CONCAT(YEAR({expr}), ' ', WEEK({expr}, 1), ' 1')"
|
2025-02-13 06:15:54 +01:00
|
|
|
date_format = "%Y %u %w"
|
|
|
|
elif unit == "month":
|
2025-02-13 14:53:05 +01:00
|
|
|
concat = f"CONCAT(YEAR({expr}), ' ', MONTH({expr}), ' 1')"
|
2025-02-13 06:15:54 +01:00
|
|
|
date_format = "%Y %c %e"
|
|
|
|
elif unit == "quarter":
|
2025-02-13 14:53:05 +01:00
|
|
|
concat = f"CONCAT(YEAR({expr}), ' ', QUARTER({expr}) * 3 - 2, ' 1')"
|
2025-02-13 06:15:54 +01:00
|
|
|
date_format = "%Y %c %e"
|
|
|
|
elif unit == "year":
|
2025-02-13 14:53:05 +01:00
|
|
|
concat = f"CONCAT(YEAR({expr}), ' 1 1')"
|
2025-02-13 06:15:54 +01:00
|
|
|
date_format = "%Y %c %e"
|
|
|
|
else:
|
|
|
|
self.unsupported("Unexpected interval unit: {unit}")
|
2025-02-13 14:53:05 +01:00
|
|
|
return f"DATE({expr})"
|
2025-02-13 06:15:54 +01:00
|
|
|
|
|
|
|
return f"STR_TO_DATE({concat}, '{date_format}')"
|
|
|
|
|
|
|
|
|
|
|
|
def _str_to_date(args):
    """Build an ``exp.StrToDate`` node from STR_TO_DATE call arguments."""
    value = seq_get(args, 0)
    fmt = MySQL.format_time(seq_get(args, 1))
    return exp.StrToDate(this=value, format=fmt)
|
2025-02-13 06:15:54 +01:00
|
|
|
|
|
|
|
|
|
|
|
def _str_to_date_sql(self, expression):
|
|
|
|
date_format = self.format_time(expression)
|
|
|
|
return f"STR_TO_DATE({self.sql(expression.this)}, {date_format})"
|
|
|
|
|
|
|
|
|
2025-02-13 08:04:41 +01:00
|
|
|
def _trim_sql(self, expression):
|
|
|
|
target = self.sql(expression, "this")
|
|
|
|
trim_type = self.sql(expression, "position")
|
|
|
|
remove_chars = self.sql(expression, "expression")
|
|
|
|
|
|
|
|
# Use TRIM/LTRIM/RTRIM syntax if the expression isn't mysql-specific
|
|
|
|
if not remove_chars:
|
|
|
|
return self.trim_sql(expression)
|
|
|
|
|
|
|
|
trim_type = f"{trim_type} " if trim_type else ""
|
|
|
|
remove_chars = f"{remove_chars} " if remove_chars else ""
|
|
|
|
from_part = "FROM " if trim_type or remove_chars else ""
|
|
|
|
return f"TRIM({trim_type}{remove_chars}{from_part}{target})"
|
|
|
|
|
|
|
|
|
2025-02-13 06:15:54 +01:00
|
|
|
def _date_add(expression_class):
    """Create a DATE_ADD/DATE_SUB argument parser building *expression_class*."""

    def builder(args):
        interval = seq_get(args, 1)
        unit_name = interval.text("unit").lower()
        return expression_class(
            this=seq_get(args, 0),
            expression=interval.this,
            unit=exp.Literal.string(unit_name),
        )

    return builder
|
|
|
|
|
|
|
|
|
|
|
|
def _date_add_sql(kind):
|
|
|
|
def func(self, expression):
|
|
|
|
this = self.sql(expression, "this")
|
|
|
|
unit = expression.text("unit").upper() or "DAY"
|
|
|
|
expression = self.sql(expression, "expression")
|
|
|
|
return f"DATE_{kind}({this}, INTERVAL {expression} {unit})"
|
|
|
|
|
|
|
|
return func
|
|
|
|
|
|
|
|
|
|
|
|
class MySQL(Dialect):
    """sqlglot dialect definition for MySQL."""

    # Mapping from MySQL date-format specifiers to the canonical strftime-style
    # tokens used internally by sqlglot.
    # https://prestodb.io/docs/current/functions/datetime.html#mysql-date-functions
    time_mapping = {
        "%M": "%B",
        "%c": "%-m",
        "%e": "%-d",
        "%h": "%I",
        "%i": "%M",
        "%s": "%S",
        "%S": "%S",
        "%u": "%W",
        "%k": "%-H",
        "%l": "%-I",
    }
|
|
|
|
|
2025-02-13 14:53:05 +01:00
|
|
|
    class Tokenizer(tokens.Tokenizer):
        # MySQL accepts both single- and double-quoted string literals.
        QUOTES = ["'", '"']
        COMMENTS = ["--", "#", ("/*", "*/")]
        # Identifiers are backtick-quoted.
        IDENTIFIERS = ["`"]
        # A quote may be escaped by doubling it or with a backslash.
        ESCAPES = ["'", "\\"]
        BIT_STRINGS = [("b'", "'"), ("B'", "'"), ("0b", "")]
        HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", "")]

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "MEDIUMTEXT": TokenType.MEDIUMTEXT,
            "LONGTEXT": TokenType.LONGTEXT,
            "MEDIUMBLOB": TokenType.MEDIUMBLOB,
            "LONGBLOB": TokenType.LONGBLOB,
            # START (TRANSACTION) is tokenized like BEGIN.
            "START": TokenType.BEGIN,
            "SEPARATOR": TokenType.SEPARATOR,
            # Character-set introducers, e.g. _utf8'abc'.
            "_ARMSCII8": TokenType.INTRODUCER,
            "_ASCII": TokenType.INTRODUCER,
            "_BIG5": TokenType.INTRODUCER,
            "_BINARY": TokenType.INTRODUCER,
            "_CP1250": TokenType.INTRODUCER,
            "_CP1251": TokenType.INTRODUCER,
            "_CP1256": TokenType.INTRODUCER,
            "_CP1257": TokenType.INTRODUCER,
            "_CP850": TokenType.INTRODUCER,
            "_CP852": TokenType.INTRODUCER,
            "_CP866": TokenType.INTRODUCER,
            "_CP932": TokenType.INTRODUCER,
            "_DEC8": TokenType.INTRODUCER,
            "_EUCJPMS": TokenType.INTRODUCER,
            "_EUCKR": TokenType.INTRODUCER,
            "_GB18030": TokenType.INTRODUCER,
            "_GB2312": TokenType.INTRODUCER,
            "_GBK": TokenType.INTRODUCER,
            "_GEOSTD8": TokenType.INTRODUCER,
            "_GREEK": TokenType.INTRODUCER,
            "_HEBREW": TokenType.INTRODUCER,
            "_HP8": TokenType.INTRODUCER,
            "_KEYBCS2": TokenType.INTRODUCER,
            "_KOI8R": TokenType.INTRODUCER,
            "_KOI8U": TokenType.INTRODUCER,
            "_LATIN1": TokenType.INTRODUCER,
            "_LATIN2": TokenType.INTRODUCER,
            "_LATIN5": TokenType.INTRODUCER,
            "_LATIN7": TokenType.INTRODUCER,
            "_MACCE": TokenType.INTRODUCER,
            "_MACROMAN": TokenType.INTRODUCER,
            "_SJIS": TokenType.INTRODUCER,
            "_SWE7": TokenType.INTRODUCER,
            "_TIS620": TokenType.INTRODUCER,
            "_UCS2": TokenType.INTRODUCER,
            "_UJIS": TokenType.INTRODUCER,
            # https://dev.mysql.com/doc/refman/8.0/en/string-literals.html
            "_UTF8": TokenType.INTRODUCER,
            "_UTF16": TokenType.INTRODUCER,
            "_UTF16LE": TokenType.INTRODUCER,
            "_UTF32": TokenType.INTRODUCER,
            "_UTF8MB3": TokenType.INTRODUCER,
            "_UTF8MB4": TokenType.INTRODUCER,
            # @@var references a system/session variable.
            "@@": TokenType.SESSION_PARAMETER,
        }

        # SET and SHOW get dedicated statement parsers in this dialect, so
        # remove them from the opaque-command set.
        COMMANDS = tokens.Tokenizer.COMMANDS - {TokenType.SET, TokenType.SHOW}
|
|
|
|
|
|
|
|
    class Parser(parser.Parser):
        # SCHEMA() is callable as a function in MySQL.
        FUNC_TOKENS = {*parser.Parser.FUNC_TOKENS, TokenType.SCHEMA}  # type: ignore

        # Per-function argument builders layered over the base parser's.
        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "DATE_ADD": _date_add(exp.DateAdd),
            "DATE_SUB": _date_add(exp.DateSub),
            "STR_TO_DATE": _str_to_date,
            "LOCATE": locate_to_strposition,
            # INSTR(haystack, needle) -> StrPosition(this=haystack, substr=needle)
            "INSTR": lambda args: exp.StrPosition(substr=seq_get(args, 1), this=seq_get(args, 0)),
            # LEFT(s, n) is equivalent to SUBSTRING(s, 1, n)
            "LEFT": lambda args: exp.Substring(
                this=seq_get(args, 0), start=exp.Literal.number(1), length=seq_get(args, 1)
            ),
        }

        FUNCTION_PARSERS = {
            **parser.Parser.FUNCTION_PARSERS,  # type: ignore
            # GROUP_CONCAT(expr [SEPARATOR sep])
            "GROUP_CONCAT": lambda self: self.expression(
                exp.GroupConcat,
                this=self._parse_lambda(),
                separator=self._match(TokenType.SEPARATOR) and self._parse_field(),
            ),
        }

        PROPERTY_PARSERS = {
            **parser.Parser.PROPERTY_PARSERS,  # type: ignore
            "ENGINE": lambda self: self._parse_property_assignment(exp.EngineProperty),
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,  # type: ignore
            TokenType.SHOW: lambda self: self._parse_show(),
            TokenType.SET: lambda self: self._parse_set(),
        }

        # SHOW variants mapped to their canonical subject plus parse options.
        # Synonyms (e.g. MASTER LOGS, CHARSET, SLAVE HOSTS) map to the same
        # canonical subject as their preferred spelling.
        SHOW_PARSERS = {
            "BINARY LOGS": _show_parser("BINARY LOGS"),
            "MASTER LOGS": _show_parser("BINARY LOGS"),
            "BINLOG EVENTS": _show_parser("BINLOG EVENTS"),
            "CHARACTER SET": _show_parser("CHARACTER SET"),
            "CHARSET": _show_parser("CHARACTER SET"),
            "COLLATION": _show_parser("COLLATION"),
            "FULL COLUMNS": _show_parser("COLUMNS", target="FROM", full=True),
            "COLUMNS": _show_parser("COLUMNS", target="FROM"),
            "CREATE DATABASE": _show_parser("CREATE DATABASE", target=True),
            "CREATE EVENT": _show_parser("CREATE EVENT", target=True),
            "CREATE FUNCTION": _show_parser("CREATE FUNCTION", target=True),
            "CREATE PROCEDURE": _show_parser("CREATE PROCEDURE", target=True),
            "CREATE TABLE": _show_parser("CREATE TABLE", target=True),
            "CREATE TRIGGER": _show_parser("CREATE TRIGGER", target=True),
            "CREATE VIEW": _show_parser("CREATE VIEW", target=True),
            "DATABASES": _show_parser("DATABASES"),
            "ENGINE": _show_parser("ENGINE", target=True),
            "STORAGE ENGINES": _show_parser("ENGINES"),
            "ENGINES": _show_parser("ENGINES"),
            "ERRORS": _show_parser("ERRORS"),
            "EVENTS": _show_parser("EVENTS"),
            "FUNCTION CODE": _show_parser("FUNCTION CODE", target=True),
            "FUNCTION STATUS": _show_parser("FUNCTION STATUS"),
            "GRANTS": _show_parser("GRANTS", target="FOR"),
            "INDEX": _show_parser("INDEX", target="FROM"),
            "MASTER STATUS": _show_parser("MASTER STATUS"),
            "OPEN TABLES": _show_parser("OPEN TABLES"),
            "PLUGINS": _show_parser("PLUGINS"),
            "PROCEDURE CODE": _show_parser("PROCEDURE CODE", target=True),
            "PROCEDURE STATUS": _show_parser("PROCEDURE STATUS"),
            "PRIVILEGES": _show_parser("PRIVILEGES"),
            "FULL PROCESSLIST": _show_parser("PROCESSLIST", full=True),
            "PROCESSLIST": _show_parser("PROCESSLIST"),
            "PROFILE": _show_parser("PROFILE"),
            "PROFILES": _show_parser("PROFILES"),
            "RELAYLOG EVENTS": _show_parser("RELAYLOG EVENTS"),
            "REPLICAS": _show_parser("REPLICAS"),
            "SLAVE HOSTS": _show_parser("REPLICAS"),
            "REPLICA STATUS": _show_parser("REPLICA STATUS"),
            "SLAVE STATUS": _show_parser("REPLICA STATUS"),
            "GLOBAL STATUS": _show_parser("STATUS", global_=True),
            "SESSION STATUS": _show_parser("STATUS"),
            "STATUS": _show_parser("STATUS"),
            "TABLE STATUS": _show_parser("TABLE STATUS"),
            "FULL TABLES": _show_parser("TABLES", full=True),
            "TABLES": _show_parser("TABLES"),
            "TRIGGERS": _show_parser("TRIGGERS"),
            "GLOBAL VARIABLES": _show_parser("VARIABLES", global_=True),
            "SESSION VARIABLES": _show_parser("VARIABLES"),
            "VARIABLES": _show_parser("VARIABLES"),
            "WARNINGS": _show_parser("WARNINGS"),
        }

        # SET scope/kind keywords mapped to their item parsers.
        SET_PARSERS = {
            "GLOBAL": lambda self: self._parse_set_item_assignment("GLOBAL"),
            "PERSIST": lambda self: self._parse_set_item_assignment("PERSIST"),
            "PERSIST_ONLY": lambda self: self._parse_set_item_assignment("PERSIST_ONLY"),
            "SESSION": lambda self: self._parse_set_item_assignment("SESSION"),
            "LOCAL": lambda self: self._parse_set_item_assignment("LOCAL"),
            "CHARACTER SET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
            "CHARSET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
            "NAMES": lambda self: self._parse_set_item_names(),
            "TRANSACTION": lambda self: self._parse_set_transaction(),
        }

        # Valid type keywords for SHOW PROFILE.
        PROFILE_TYPES = {
            "ALL",
            "BLOCK IO",
            "CONTEXT SWITCHES",
            "CPU",
            "IPC",
            "MEMORY",
            "PAGE FAULTS",
            "SOURCE",
            "SWAPS",
        }

        # Valid characteristics for SET TRANSACTION.
        TRANSACTION_CHARACTERISTICS = {
            "ISOLATION LEVEL REPEATABLE READ",
            "ISOLATION LEVEL READ COMMITTED",
            "ISOLATION LEVEL READ UNCOMMITTED",
            "ISOLATION LEVEL SERIALIZABLE",
            "READ WRITE",
            "READ ONLY",
        }
|
|
|
|
|
2025-02-13 14:53:05 +01:00
|
|
|
        def _parse_show_mysql(self, this, target=False, full=None, global_=None):
            """Parse the remainder of a SHOW statement into an ``exp.Show``.

            Args:
                this: canonical SHOW subject, e.g. "TABLES" or "BINLOG EVENTS".
                target: when truthy, a target identifier follows; a string
                    value is a keyword (e.g. "FROM") consumed before it.
                full: True when the caller already consumed a FULL modifier.
                global_: True when the caller already consumed GLOBAL.
            """
            if target:
                if isinstance(target, str):
                    self._match_text_seq(target)
                target_id = self._parse_id_var()
            else:
                target_id = None

            # SHOW ... IN 'log_name' (binary/relay log statements).
            log = self._parse_string() if self._match_text_seq("IN") else None

            if this in {"BINLOG EVENTS", "RELAYLOG EVENTS"}:
                # For these statements FROM introduces a log position, not a db.
                position = self._parse_number() if self._match_text_seq("FROM") else None
                db = None
            else:
                position = None
                db = self._parse_id_var() if self._match_text_seq("FROM") else None

            channel = self._parse_id_var() if self._match_text_seq("FOR", "CHANNEL") else None

            like = self._parse_string() if self._match_text_seq("LIKE") else None
            where = self._parse_where()

            if this == "PROFILE":
                # SHOW PROFILE takes type keywords and explicit FOR QUERY /
                # OFFSET / LIMIT clauses.
                types = self._parse_csv(lambda: self._parse_var_from_options(self.PROFILE_TYPES))
                query = self._parse_number() if self._match_text_seq("FOR", "QUERY") else None
                offset = self._parse_number() if self._match_text_seq("OFFSET") else None
                limit = self._parse_number() if self._match_text_seq("LIMIT") else None
            else:
                types, query = None, None
                # Other SHOW statements use the old-style "LIMIT offset, count".
                offset, limit = self._parse_oldstyle_limit()

            # SHOW ENGINE ... {MUTEX | STATUS}: True for MUTEX, False for STATUS.
            mutex = True if self._match_text_seq("MUTEX") else None
            mutex = False if self._match_text_seq("STATUS") else mutex

            return self.expression(
                exp.Show,
                this=this,
                target=target_id,
                full=full,
                log=log,
                position=position,
                db=db,
                channel=channel,
                like=like,
                where=where,
                types=types,
                query=query,
                offset=offset,
                limit=limit,
                mutex=mutex,
                # "global" is a Python keyword, hence the dict splat.
                **{"global": global_},
            )
|
|
|
|
|
2025-02-13 14:54:32 +01:00
|
|
|
def _parse_var_from_options(self, options):
|
|
|
|
for option in options:
|
|
|
|
if self._match_text_seq(*option.split(" ")):
|
|
|
|
return exp.Var(this=option)
|
2025-02-13 14:53:05 +01:00
|
|
|
return None
|
|
|
|
|
|
|
|
def _parse_oldstyle_limit(self):
|
|
|
|
limit = None
|
|
|
|
offset = None
|
2025-02-13 14:54:32 +01:00
|
|
|
if self._match_text_seq("LIMIT"):
|
2025-02-13 14:53:05 +01:00
|
|
|
parts = self._parse_csv(self._parse_number)
|
|
|
|
if len(parts) == 1:
|
|
|
|
limit = parts[0]
|
|
|
|
elif len(parts) == 2:
|
|
|
|
limit = parts[1]
|
|
|
|
offset = parts[0]
|
|
|
|
return offset, limit
|
|
|
|
|
|
|
|
def _default_parse_set_item(self):
|
|
|
|
return self._parse_set_item_assignment(kind=None)
|
|
|
|
|
|
|
|
def _parse_set_item_assignment(self, kind):
|
2025-02-13 14:54:32 +01:00
|
|
|
if kind in {"GLOBAL", "SESSION"} and self._match_text_seq("TRANSACTION"):
|
|
|
|
return self._parse_set_transaction(global_=kind == "GLOBAL")
|
|
|
|
|
2025-02-13 14:53:05 +01:00
|
|
|
left = self._parse_primary() or self._parse_id_var()
|
|
|
|
if not self._match(TokenType.EQ):
|
|
|
|
self.raise_error("Expected =")
|
|
|
|
right = self._parse_statement() or self._parse_id_var()
|
|
|
|
|
|
|
|
this = self.expression(
|
|
|
|
exp.EQ,
|
|
|
|
this=left,
|
|
|
|
expression=right,
|
|
|
|
)
|
|
|
|
|
|
|
|
return self.expression(
|
|
|
|
exp.SetItem,
|
|
|
|
this=this,
|
|
|
|
kind=kind,
|
|
|
|
)
|
|
|
|
|
|
|
|
def _parse_set_item_charset(self, kind):
|
|
|
|
this = self._parse_string() or self._parse_id_var()
|
|
|
|
|
|
|
|
return self.expression(
|
|
|
|
exp.SetItem,
|
|
|
|
this=this,
|
|
|
|
kind=kind,
|
|
|
|
)
|
|
|
|
|
|
|
|
def _parse_set_item_names(self):
|
|
|
|
charset = self._parse_string() or self._parse_id_var()
|
2025-02-13 14:54:32 +01:00
|
|
|
if self._match_text_seq("COLLATE"):
|
2025-02-13 14:53:05 +01:00
|
|
|
collate = self._parse_string() or self._parse_id_var()
|
|
|
|
else:
|
|
|
|
collate = None
|
|
|
|
return self.expression(
|
|
|
|
exp.SetItem,
|
|
|
|
this=charset,
|
|
|
|
collate=collate,
|
|
|
|
kind="NAMES",
|
|
|
|
)
|
|
|
|
|
2025-02-13 14:54:32 +01:00
|
|
|
def _parse_set_transaction(self, global_=False):
|
|
|
|
self._match_text_seq("TRANSACTION")
|
|
|
|
characteristics = self._parse_csv(
|
|
|
|
lambda: self._parse_var_from_options(self.TRANSACTION_CHARACTERISTICS)
|
|
|
|
)
|
|
|
|
return self.expression(
|
|
|
|
exp.SetItem,
|
|
|
|
expressions=characteristics,
|
|
|
|
kind="TRANSACTION",
|
|
|
|
**{"global": global_},
|
|
|
|
)
|
|
|
|
|
2025-02-13 14:53:05 +01:00
|
|
|
    class Generator(generator.Generator):
        # MySQL has no NULLS FIRST/LAST modifier in ORDER BY.
        NULL_ORDERING_SUPPORTED = False

        # Expression-node renderers layered over the base generator's.
        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            exp.CurrentDate: no_paren_current_date_sql,
            exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
            exp.ILike: no_ilike_sql,
            exp.TableSample: no_tablesample_sql,
            exp.TryCast: no_trycast_sql,
            exp.DateAdd: _date_add_sql("ADD"),
            exp.DateSub: _date_add_sql("SUB"),
            exp.DateTrunc: _date_trunc_sql,
            exp.GroupConcat: lambda self, e: f"""GROUP_CONCAT({self.sql(e, "this")} SEPARATOR {self.sql(e, "separator") or "','"})""",
            exp.StrToDate: _str_to_date_sql,
            exp.StrToTime: _str_to_date_sql,
            exp.Trim: _trim_sql,
            # <=> is MySQL's NULL-safe equality operator.
            exp.NullSafeEQ: lambda self, e: self.binary(e, "<=>"),
            exp.NullSafeNEQ: lambda self, e: self.not_sql(self.binary(e, "<=>")),
            exp.StrPosition: strposition_to_locate_sql,
        }

        # Properties rendered at the top level of CREATE statements.
        ROOT_PROPERTIES = {
            exp.EngineProperty,
            exp.AutoIncrementProperty,
            exp.CharacterSetProperty,
            exp.CollateProperty,
            exp.SchemaCommentProperty,
            exp.LikeProperty,
        }

        # These MySQL-native types must be emitted verbatim rather than being
        # remapped to generic equivalents.
        TYPE_MAPPING = generator.Generator.TYPE_MAPPING.copy()
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMBLOB)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGBLOB)

        # MySQL has no WITH (...) property clause.
        WITH_PROPERTIES: t.Set[t.Type[exp.Property]] = set()
|
|
|
|
|
|
|
|
        def show_sql(self, expression):
            """Render an ``exp.Show`` expression back into MySQL SHOW syntax.

            Mirrors ``Parser._parse_show_mysql``: each optional clause renders
            to "" when absent, so the final f-string concatenation is safe.
            """
            this = f" {expression.name}"
            full = " FULL" if expression.args.get("full") else ""
            global_ = " GLOBAL" if expression.args.get("global") else ""

            target = self.sql(expression, "target")
            target = f" {target}" if target else ""
            # Some subjects require a keyword before the target identifier.
            if expression.name in {"COLUMNS", "INDEX"}:
                target = f" FROM{target}"
            elif expression.name == "GRANTS":
                target = f" FOR{target}"

            db = self._prefixed_sql("FROM", expression, "db")

            like = self._prefixed_sql("LIKE", expression, "like")
            where = self.sql(expression, "where")

            types = self.expressions(expression, key="types")
            types = f" {types}" if types else types
            query = self._prefixed_sql("FOR QUERY", expression, "query")

            if expression.name == "PROFILE":
                # PROFILE uses explicit OFFSET/LIMIT keyword clauses...
                offset = self._prefixed_sql("OFFSET", expression, "offset")
                limit = self._prefixed_sql("LIMIT", expression, "limit")
            else:
                # ...everything else uses old-style "LIMIT offset, count".
                offset = ""
                limit = self._oldstyle_limit_sql(expression)

            log = self._prefixed_sql("IN", expression, "log")
            position = self._prefixed_sql("FROM", expression, "position")

            channel = self._prefixed_sql("FOR CHANNEL", expression, "channel")

            # SHOW ENGINE always carries a trailing MUTEX or STATUS keyword.
            if expression.name == "ENGINE":
                mutex_or_status = " MUTEX" if expression.args.get("mutex") else " STATUS"
            else:
                mutex_or_status = ""

            return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"
|
|
|
|
|
|
|
|
def _prefixed_sql(self, prefix, expression, arg):
|
|
|
|
sql = self.sql(expression, arg)
|
|
|
|
if not sql:
|
|
|
|
return ""
|
|
|
|
return f" {prefix} {sql}"
|
|
|
|
|
|
|
|
def _oldstyle_limit_sql(self, expression):
|
|
|
|
limit = self.sql(expression, "limit")
|
|
|
|
offset = self.sql(expression, "offset")
|
|
|
|
if limit:
|
|
|
|
limit_offset = f"{offset}, {limit}" if offset else limit
|
|
|
|
return f" LIMIT {limit_offset}"
|
|
|
|
return ""
|
|
|
|
|
|
|
|
def setitem_sql(self, expression):
|
|
|
|
kind = self.sql(expression, "kind")
|
|
|
|
kind = f"{kind} " if kind else ""
|
|
|
|
this = self.sql(expression, "this")
|
2025-02-13 14:54:32 +01:00
|
|
|
expressions = self.expressions(expression)
|
2025-02-13 14:53:05 +01:00
|
|
|
collate = self.sql(expression, "collate")
|
|
|
|
collate = f" COLLATE {collate}" if collate else ""
|
2025-02-13 14:54:32 +01:00
|
|
|
global_ = "GLOBAL " if expression.args.get("global") else ""
|
|
|
|
return f"{global_}{kind}{this}{expressions}{collate}"
|
2025-02-13 14:53:05 +01:00
|
|
|
|
|
|
|
def set_sql(self, expression):
|
|
|
|
return f"SET {self.expressions(expression)}"
|