from __future__ import annotations

import logging
import typing as t

from sqlglot import exp
from sqlglot.errors import ErrorLevel, UnsupportedError, concat_messages
from sqlglot.helper import apply_index_offset, csv, seq_get, should_identify
from sqlglot.time import format_time
from sqlglot.tokens import TokenType

logger = logging.getLogger("sqlglot")


class Generator:
    """
    Generator interprets the given syntax tree and produces a SQL string as an output.

    Args:
        time_mapping (dict): the dictionary of custom time mappings in which the key
            represents a python time format and the output the target time format.
        time_trie (trie): a trie of the time_mapping keys.
        pretty (bool): if set to True the returned string will be formatted. Default: False.
        quote_start (str): specifies which starting character to use to delimit quotes. Default: '.
        quote_end (str): specifies which ending character to use to delimit quotes. Default: '.
        identifier_start (str): specifies which starting character to use to delimit identifiers. Default: ".
        identifier_end (str): specifies which ending character to use to delimit identifiers. Default: ".
        bit_start (str): specifies which starting character to use to delimit bit literals. Default: None.
        bit_end (str): specifies which ending character to use to delimit bit literals. Default: None.
        hex_start (str): specifies which starting character to use to delimit hex literals. Default: None.
        hex_end (str): specifies which ending character to use to delimit hex literals. Default: None.
        byte_start (str): specifies which starting character to use to delimit byte literals. Default: None.
        byte_end (str): specifies which ending character to use to delimit byte literals. Default: None.
        raw_start (str): specifies which starting character to use to delimit raw literals. Default: None.
        raw_end (str): specifies which ending character to use to delimit raw literals. Default: None.
        identify (bool | str): 'always': always quote, 'safe': quote identifiers if they don't contain
            an uppercase character, True defaults to 'always'.
        normalize (bool): if set to True all identifiers will be lower cased.
        string_escape (str): specifies a string escape character. Default: '.
        identifier_escape (str): specifies an identifier escape character. Default: ".
        pad (int): determines padding in a formatted string. Default: 2.
        indent (int): determines the size of indentation in a formatted string. Default: 2.
        unnest_column_only (bool): if true unnest table aliases are considered only as column aliases.
        normalize_functions (str): normalize function names, "upper", "lower", or None.
            Default: "upper"
        alias_post_tablesample (bool): if the table alias comes after tablesample.
            Default: False
        unsupported_level (ErrorLevel): determines the generator's behavior when it encounters
            unsupported expressions. Default ErrorLevel.WARN.
        null_ordering (str): Indicates the default null ordering method to use if not explicitly set.
            Options are "nulls_are_small", "nulls_are_large", "nulls_are_last".
            Default: "nulls_are_small"
        max_unsupported (int): Maximum number of unsupported messages to include in a raised UnsupportedError.
            This is only relevant if unsupported_level is ErrorLevel.RAISE.
            Default: 3
        leading_comma (bool): if the comma is leading or trailing in select statements.
            Default: False
        max_text_width: The max number of characters in a segment before creating new lines in pretty mode.
            The default is on the smaller end because the length only represents a segment and not the true
            line length.
            Default: 80
        comments: Whether or not to preserve comments in the output SQL code.
            Default: True
    """
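
    # Illustrative sketch, not part of the class definition: a Generator is normally obtained
    # through a Dialect, but it can also be driven directly. Assuming `sqlglot.parse_one` is
    # importable in the caller's context, generation looks roughly like:
    #
    #     from sqlglot import parse_one
    #
    #     generator = Generator(pretty=True, identify=True)
    #     print(generator.generate(parse_one("SELECT a FROM t")))
    #
    # which renders the parsed syntax tree back to a SQL string using the settings documented above.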
|
|
|
|
TRANSFORMS = {
|
2025-02-13 15:26:26 +01:00
|
|
|
exp.DateAdd: lambda self, e: self.func(
|
2025-02-13 15:46:19 +01:00
|
|
|
"DATE_ADD", e.this, e.expression, exp.Literal.string(e.text("unit"))
|
2025-02-13 15:26:26 +01:00
|
|
|
),
|
|
|
|
exp.TsOrDsAdd: lambda self, e: self.func(
|
2025-02-13 15:46:19 +01:00
|
|
|
"TS_OR_DS_ADD", e.this, e.expression, exp.Literal.string(e.text("unit"))
|
2025-02-13 15:26:26 +01:00
|
|
|
),
|
|
|
|
exp.VarMap: lambda self, e: self.func("MAP", e.args["keys"], e.args["values"]),
|
2025-02-13 15:40:23 +01:00
|
|
|
exp.CharacterSetProperty: lambda self, e: f"{'DEFAULT ' if e.args.get('default') else ''}CHARACTER SET={self.sql(e, 'this')}",
|
|
|
|
exp.ExecuteAsProperty: lambda self, e: self.naked_property(e),
|
|
|
|
exp.ExternalProperty: lambda self, e: "EXTERNAL",
|
2025-02-13 14:40:43 +01:00
|
|
|
exp.LanguageProperty: lambda self, e: self.naked_property(e),
|
|
|
|
exp.LocationProperty: lambda self, e: self.naked_property(e),
|
2025-02-13 15:40:23 +01:00
|
|
|
exp.LogProperty: lambda self, e: f"{'NO ' if e.args.get('no') else ''}LOG",
|
|
|
|
exp.MaterializedProperty: lambda self, e: "MATERIALIZED",
|
|
|
|
exp.NoPrimaryIndexProperty: lambda self, e: "NO PRIMARY INDEX",
|
2025-02-13 15:57:23 +01:00
|
|
|
exp.OnCommitProperty: lambda self, e: f"ON COMMIT {'DELETE' if e.args.get('delete') else 'PRESERVE'} ROWS",
|
2025-02-13 14:40:43 +01:00
|
|
|
exp.ReturnsProperty: lambda self, e: self.naked_property(e),
|
2025-02-13 15:40:23 +01:00
|
|
|
exp.SetProperty: lambda self, e: f"{'MULTI' if e.args.get('multi') else ''}SET",
|
2025-02-13 15:57:23 +01:00
|
|
|
exp.SettingsProperty: lambda self, e: f"SETTINGS{self.seg('')}{(self.expressions(e))}",
|
2025-02-13 15:40:23 +01:00
|
|
|
exp.SqlSecurityProperty: lambda self, e: f"SQL SECURITY {'DEFINER' if e.args.get('definer') else 'INVOKER'}",
|
2025-02-13 15:57:23 +01:00
|
|
|
exp.TemporaryProperty: lambda self, e: f"TEMPORARY",
|
2025-02-13 15:40:23 +01:00
|
|
|
exp.TransientProperty: lambda self, e: "TRANSIENT",
|
2025-02-13 15:52:09 +01:00
|
|
|
exp.StabilityProperty: lambda self, e: e.name,
|
|
|
|
exp.VolatileProperty: lambda self, e: "VOLATILE",
|
2025-02-13 15:08:15 +01:00
|
|
|
exp.WithJournalTableProperty: lambda self, e: f"WITH JOURNAL TABLE={self.sql(e, 'this')}",
|
2025-02-13 15:26:26 +01:00
|
|
|
exp.CaseSpecificColumnConstraint: lambda self, e: f"{'NOT ' if e.args.get('not_') else ''}CASESPECIFIC",
|
|
|
|
exp.CharacterSetColumnConstraint: lambda self, e: f"CHARACTER SET {self.sql(e, 'this')}",
|
|
|
|
exp.DateFormatColumnConstraint: lambda self, e: f"FORMAT {self.sql(e, 'this')}",
|
2025-02-13 15:52:09 +01:00
|
|
|
exp.OnUpdateColumnConstraint: lambda self, e: f"ON UPDATE {self.sql(e, 'this')}",
|
2025-02-13 15:26:26 +01:00
|
|
|
exp.UppercaseColumnConstraint: lambda self, e: f"UPPERCASE",
|
|
|
|
exp.TitleColumnConstraint: lambda self, e: f"TITLE {self.sql(e, 'this')}",
|
|
|
|
exp.PathColumnConstraint: lambda self, e: f"PATH {self.sql(e, 'this')}",
|
|
|
|
exp.CheckColumnConstraint: lambda self, e: f"CHECK ({self.sql(e, 'this')})",
|
|
|
|
exp.CommentColumnConstraint: lambda self, e: f"COMMENT {self.sql(e, 'this')}",
|
|
|
|
exp.CollateColumnConstraint: lambda self, e: f"COLLATE {self.sql(e, 'this')}",
|
|
|
|
exp.EncodeColumnConstraint: lambda self, e: f"ENCODE {self.sql(e, 'this')}",
|
|
|
|
exp.DefaultColumnConstraint: lambda self, e: f"DEFAULT {self.sql(e, 'this')}",
|
2025-02-13 15:30:09 +01:00
|
|
|
exp.InlineLengthColumnConstraint: lambda self, e: f"INLINE LENGTH {self.sql(e, 'this')}",
|
2025-02-13 06:15:54 +01:00
|
|
|
}
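
    # A hedged example of how this mapping is meant to be extended (the subclass name below is
    # an illustrative assumption, not something defined in this module): a dialect-specific
    # generator subclasses Generator and overrides or adds entries, e.g.
    #
    #     class MyDialectGenerator(Generator):
    #         TRANSFORMS = {
    #             **Generator.TRANSFORMS,
    #             exp.DateAdd: lambda self, e: self.func("DATEADD", e.text("unit"), e.expression, e.this),
    #         }
    #
    # Each value is either a callable taking (self, expression) or a literal SQL string.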

    # Whether or not null ordering is supported in order by
    NULL_ORDERING_SUPPORTED = True

    # Whether or not locking reads (i.e. SELECT ... FOR UPDATE/SHARE) are supported
    LOCKING_READS_SUPPORTED = False

    # Always do union distinct or union all
    EXPLICIT_UNION = False

    # Wrap derived values in parens, usually standard but spark doesn't support it
    WRAP_DERIVED_VALUES = True

    # Whether or not create function uses an AS before the RETURN
    CREATE_FUNCTION_RETURN_AS = True

    # Whether or not MERGE ... WHEN MATCHED BY SOURCE is allowed
    MATCHED_BY_SOURCE = True

    # Whether or not the INTERVAL expression works only with values like '1 day'
    SINGLE_STRING_INTERVAL = False

    # Whether or not the plural form of date parts like day (i.e. "days") is supported in INTERVALs
    INTERVAL_ALLOWS_PLURAL_FORM = True

    # Whether or not the TABLESAMPLE clause supports a method name, like BERNOULLI
    TABLESAMPLE_WITH_METHOD = True

    # Whether or not to treat the number in TABLESAMPLE (50) as a percentage
    TABLESAMPLE_SIZE_IS_PERCENT = False

    # Whether or not limit and fetch are supported (possible values: "ALL", "LIMIT", "FETCH")
    LIMIT_FETCH = "ALL"

    # Whether a table is allowed to be renamed with a db
    RENAME_TABLE_WITH_DB = True

    # The separator for grouping sets and rollups
    GROUPINGS_SEP = ","

    # The string used for creating an index on a table
    INDEX_ON = "ON"

    TYPE_MAPPING = {
        exp.DataType.Type.NCHAR: "CHAR",
        exp.DataType.Type.NVARCHAR: "VARCHAR",
        exp.DataType.Type.MEDIUMTEXT: "TEXT",
        exp.DataType.Type.LONGTEXT: "TEXT",
        exp.DataType.Type.MEDIUMBLOB: "BLOB",
        exp.DataType.Type.LONGBLOB: "BLOB",
        exp.DataType.Type.INET: "INET",
    }

    STAR_MAPPING = {
        "except": "EXCEPT",
        "replace": "REPLACE",
    }

    TIME_PART_SINGULARS = {
        "microseconds": "microsecond",
        "seconds": "second",
        "minutes": "minute",
        "hours": "hour",
        "days": "day",
        "weeks": "week",
        "months": "month",
        "quarters": "quarter",
        "years": "year",
    }

    TOKEN_MAPPING: t.Dict[TokenType, str] = {}

    STRUCT_DELIMITER = ("<", ">")

    PARAMETER_TOKEN = "@"

    PROPERTIES_LOCATION = {
        exp.AlgorithmProperty: exp.Properties.Location.POST_CREATE,
        exp.AutoIncrementProperty: exp.Properties.Location.POST_SCHEMA,
        exp.BlockCompressionProperty: exp.Properties.Location.POST_NAME,
        exp.CharacterSetProperty: exp.Properties.Location.POST_SCHEMA,
        exp.ChecksumProperty: exp.Properties.Location.POST_NAME,
        exp.CollateProperty: exp.Properties.Location.POST_SCHEMA,
        exp.Cluster: exp.Properties.Location.POST_SCHEMA,
        exp.DataBlocksizeProperty: exp.Properties.Location.POST_NAME,
        exp.DefinerProperty: exp.Properties.Location.POST_CREATE,
        exp.DistKeyProperty: exp.Properties.Location.POST_SCHEMA,
        exp.DistStyleProperty: exp.Properties.Location.POST_SCHEMA,
        exp.EngineProperty: exp.Properties.Location.POST_SCHEMA,
        exp.ExecuteAsProperty: exp.Properties.Location.POST_SCHEMA,
        exp.ExternalProperty: exp.Properties.Location.POST_CREATE,
        exp.FallbackProperty: exp.Properties.Location.POST_NAME,
        exp.FileFormatProperty: exp.Properties.Location.POST_WITH,
        exp.FreespaceProperty: exp.Properties.Location.POST_NAME,
        exp.IsolatedLoadingProperty: exp.Properties.Location.POST_NAME,
        exp.JournalProperty: exp.Properties.Location.POST_NAME,
        exp.LanguageProperty: exp.Properties.Location.POST_SCHEMA,
        exp.LikeProperty: exp.Properties.Location.POST_SCHEMA,
        exp.LocationProperty: exp.Properties.Location.POST_SCHEMA,
        exp.LockingProperty: exp.Properties.Location.POST_ALIAS,
        exp.LogProperty: exp.Properties.Location.POST_NAME,
        exp.MaterializedProperty: exp.Properties.Location.POST_CREATE,
        exp.MergeBlockRatioProperty: exp.Properties.Location.POST_NAME,
        exp.NoPrimaryIndexProperty: exp.Properties.Location.POST_EXPRESSION,
        exp.OnCommitProperty: exp.Properties.Location.POST_EXPRESSION,
        exp.Order: exp.Properties.Location.POST_SCHEMA,
        exp.PartitionedByProperty: exp.Properties.Location.POST_WITH,
        exp.PrimaryKey: exp.Properties.Location.POST_SCHEMA,
        exp.Property: exp.Properties.Location.POST_WITH,
        exp.ReturnsProperty: exp.Properties.Location.POST_SCHEMA,
        exp.RowFormatProperty: exp.Properties.Location.POST_SCHEMA,
        exp.RowFormatDelimitedProperty: exp.Properties.Location.POST_SCHEMA,
        exp.RowFormatSerdeProperty: exp.Properties.Location.POST_SCHEMA,
        exp.SchemaCommentProperty: exp.Properties.Location.POST_SCHEMA,
        exp.SerdeProperties: exp.Properties.Location.POST_SCHEMA,
        exp.Set: exp.Properties.Location.POST_SCHEMA,
        exp.SettingsProperty: exp.Properties.Location.POST_SCHEMA,
        exp.SetProperty: exp.Properties.Location.POST_CREATE,
        exp.SortKeyProperty: exp.Properties.Location.POST_SCHEMA,
        exp.SqlSecurityProperty: exp.Properties.Location.POST_CREATE,
        exp.StabilityProperty: exp.Properties.Location.POST_SCHEMA,
        exp.TemporaryProperty: exp.Properties.Location.POST_CREATE,
        exp.TransientProperty: exp.Properties.Location.POST_CREATE,
        exp.MergeTreeTTL: exp.Properties.Location.POST_SCHEMA,
        exp.VolatileProperty: exp.Properties.Location.POST_CREATE,
        exp.WithDataProperty: exp.Properties.Location.POST_EXPRESSION,
        exp.WithJournalTableProperty: exp.Properties.Location.POST_NAME,
    }

    JOIN_HINTS = True
    TABLE_HINTS = True

    RESERVED_KEYWORDS: t.Set[str] = set()
    WITH_SEPARATED_COMMENTS = (exp.Select, exp.From, exp.Where, exp.With)
    UNWRAPPED_INTERVAL_VALUES = (exp.Column, exp.Literal, exp.Neg, exp.Paren)

    SENTINEL_LINE_BREAK = "__SQLGLOT__LB__"

    __slots__ = (
        "time_mapping",
        "time_trie",
        "pretty",
        "quote_start",
        "quote_end",
        "identifier_start",
        "identifier_end",
        "bit_start",
        "bit_end",
        "hex_start",
        "hex_end",
        "byte_start",
        "byte_end",
        "raw_start",
        "raw_end",
        "identify",
        "normalize",
        "string_escape",
        "identifier_escape",
        "pad",
        "index_offset",
        "unnest_column_only",
        "alias_post_tablesample",
        "normalize_functions",
        "unsupported_level",
        "unsupported_messages",
        "null_ordering",
        "max_unsupported",
        "_indent",
        "_escaped_quote_end",
        "_escaped_identifier_end",
        "_leading_comma",
        "_max_text_width",
        "_comments",
        "_cache",
    )

    def __init__(
        self,
        time_mapping=None,
        time_trie=None,
        pretty=None,
        quote_start=None,
        quote_end=None,
        identifier_start=None,
        identifier_end=None,
        bit_start=None,
        bit_end=None,
        hex_start=None,
        hex_end=None,
        byte_start=None,
        byte_end=None,
        raw_start=None,
        raw_end=None,
        identify=False,
        normalize=False,
        string_escape=None,
        identifier_escape=None,
        pad=2,
        indent=2,
        index_offset=0,
        unnest_column_only=False,
        alias_post_tablesample=False,
        normalize_functions="upper",
        unsupported_level=ErrorLevel.WARN,
        null_ordering=None,
        max_unsupported=3,
        leading_comma=False,
        max_text_width=80,
        comments=True,
    ):
        import sqlglot

        self.time_mapping = time_mapping or {}
        self.time_trie = time_trie
        self.pretty = pretty if pretty is not None else sqlglot.pretty
        self.quote_start = quote_start or "'"
        self.quote_end = quote_end or "'"
        self.identifier_start = identifier_start or '"'
        self.identifier_end = identifier_end or '"'
        self.bit_start = bit_start
        self.bit_end = bit_end
        self.hex_start = hex_start
        self.hex_end = hex_end
        self.byte_start = byte_start
        self.byte_end = byte_end
        self.raw_start = raw_start
        self.raw_end = raw_end
        self.identify = identify
        self.normalize = normalize
        self.string_escape = string_escape or "'"
        self.identifier_escape = identifier_escape or '"'
        self.pad = pad
        self.index_offset = index_offset
        self.unnest_column_only = unnest_column_only
        self.alias_post_tablesample = alias_post_tablesample
        self.normalize_functions = normalize_functions
        self.unsupported_level = unsupported_level
        self.unsupported_messages = []
        self.max_unsupported = max_unsupported
        self.null_ordering = null_ordering
        self._indent = indent
        self._escaped_quote_end = self.string_escape + self.quote_end
        self._escaped_identifier_end = self.identifier_escape + self.identifier_end
        self._leading_comma = leading_comma
        self._max_text_width = max_text_width
        self._comments = comments
        self._cache = None

    def generate(
        self,
        expression: t.Optional[exp.Expression],
        cache: t.Optional[t.Dict[int, str]] = None,
    ) -> str:
        """
        Generates a SQL string by interpreting the given syntax tree.

        Args:
            expression: the syntax tree.
            cache: an optional sql string cache. This leverages the hash of an expression,
                which can be slow to compute, so only use it if you set _hash on each node.

        Returns:
            the SQL string.
        """
        if cache is not None:
            self._cache = cache

        self.unsupported_messages = []
        sql = self.sql(expression).strip()
        self._cache = None

        if self.unsupported_level == ErrorLevel.IGNORE:
            return sql

        if self.unsupported_level == ErrorLevel.WARN:
            for msg in self.unsupported_messages:
                logger.warning(msg)
        elif self.unsupported_level == ErrorLevel.RAISE and self.unsupported_messages:
            raise UnsupportedError(concat_messages(self.unsupported_messages, self.max_unsupported))

        if self.pretty:
            sql = sql.replace(self.SENTINEL_LINE_BREAK, "\n")
        return sql
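
    # For illustration, restating the branches above: Generator(unsupported_level=ErrorLevel.RAISE)
    # makes generate() raise a single UnsupportedError that concatenates at most max_unsupported (3)
    # collected messages, ErrorLevel.WARN only logs them through the module-level logger, and
    # ErrorLevel.IGNORE returns the SQL string untouched.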

    def unsupported(self, message: str) -> None:
        if self.unsupported_level == ErrorLevel.IMMEDIATE:
            raise UnsupportedError(message)
        self.unsupported_messages.append(message)

    def sep(self, sep: str = " ") -> str:
        return f"{sep.strip()}\n" if self.pretty else sep

    def seg(self, sql: str, sep: str = " ") -> str:
        return f"{self.sep(sep)}{sql}"

    def pad_comment(self, comment: str) -> str:
        comment = " " + comment if comment[0].strip() else comment
        comment = comment + " " if comment[-1].strip() else comment
        return comment

    def maybe_comment(
        self,
        sql: str,
        expression: t.Optional[exp.Expression] = None,
        comments: t.Optional[t.List[str]] = None,
    ) -> str:
        comments = ((expression and expression.comments) if comments is None else comments) if self._comments else None  # type: ignore

        if not comments or isinstance(expression, exp.Binary):
            return sql

        sep = "\n" if self.pretty else " "
        comments_sql = sep.join(
            f"/*{self.pad_comment(comment)}*/" for comment in comments if comment
        )

        if not comments_sql:
            return sql

        if isinstance(expression, self.WITH_SEPARATED_COMMENTS):
            return (
                f"{self.sep()}{comments_sql}{sql}"
                if sql[0].isspace()
                else f"{comments_sql}{self.sep()}{sql}"
            )

        return f"{sql} {comments_sql}"

    def wrap(self, expression: exp.Expression | str) -> str:
        this_sql = self.indent(
            self.sql(expression)
            if isinstance(expression, (exp.Select, exp.Union))
            else self.sql(expression, "this"),
            level=1,
            pad=0,
        )
        return f"({self.sep('')}{this_sql}{self.seg(')', sep='')}"

    def no_identify(self, func: t.Callable[..., str], *args, **kwargs) -> str:
        original = self.identify
        self.identify = False
        result = func(*args, **kwargs)
        self.identify = original
        return result

    def normalize_func(self, name: str) -> str:
        if self.normalize_functions == "upper":
            return name.upper()
        if self.normalize_functions == "lower":
            return name.lower()
        return name

    def indent(
        self,
        sql: str,
        level: int = 0,
        pad: t.Optional[int] = None,
        skip_first: bool = False,
        skip_last: bool = False,
    ) -> str:
        if not self.pretty:
            return sql

        pad = self.pad if pad is None else pad
        lines = sql.split("\n")

        return "\n".join(
            line
            if (skip_first and i == 0) or (skip_last and i == len(lines) - 1)
            else f"{' ' * (level * self._indent + pad)}{line}"
            for i, line in enumerate(lines)
        )

    def sql(
        self,
        expression: t.Optional[str | exp.Expression],
        key: t.Optional[str] = None,
        comment: bool = True,
    ) -> str:
        if not expression:
            return ""

        if isinstance(expression, str):
            return expression

        if key:
            return self.sql(expression.args.get(key))

        if self._cache is not None:
            expression_id = hash(expression)

            if expression_id in self._cache:
                return self._cache[expression_id]

        transform = self.TRANSFORMS.get(expression.__class__)

        if callable(transform):
            sql = transform(self, expression)
        elif transform:
            sql = transform
        elif isinstance(expression, exp.Expression):
            exp_handler_name = f"{expression.key}_sql"

            if hasattr(self, exp_handler_name):
                sql = getattr(self, exp_handler_name)(expression)
            elif isinstance(expression, exp.Func):
                sql = self.function_fallback_sql(expression)
            elif isinstance(expression, exp.Property):
                sql = self.property_sql(expression)
            else:
                raise ValueError(f"Unsupported expression type {expression.__class__.__name__}")
        else:
            raise ValueError(f"Expected an Expression. Received {type(expression)}: {expression}")

        sql = self.maybe_comment(sql, expression) if self._comments and comment else sql

        if self._cache is not None:
            self._cache[expression_id] = sql
        return sql
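
    # Dispatch sketch, restating the lookup above with an assumed example: an expression class
    # without a TRANSFORMS entry falls through to the handler named f"{expression.key}_sql", so
    # an exp.Cache node (key "cache") is rendered by cache_sql() below, unknown functions fall
    # back to function_fallback_sql(), and bare properties go through property_sql().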

    def uncache_sql(self, expression: exp.Uncache) -> str:
        table = self.sql(expression, "this")
        exists_sql = " IF EXISTS" if expression.args.get("exists") else ""
        return f"UNCACHE TABLE{exists_sql} {table}"

    def cache_sql(self, expression: exp.Cache) -> str:
        lazy = " LAZY" if expression.args.get("lazy") else ""
        table = self.sql(expression, "this")
        options = expression.args.get("options")
        options = f" OPTIONS({self.sql(options[0])} = {self.sql(options[1])})" if options else ""
        sql = self.sql(expression, "expression")
        sql = f" AS{self.sep()}{sql}" if sql else ""
        sql = f"CACHE{lazy} TABLE {table}{options}{sql}"
        return self.prepend_ctes(expression, sql)

    def characterset_sql(self, expression: exp.CharacterSet) -> str:
        if isinstance(expression.parent, exp.Cast):
            return f"CHAR CHARACTER SET {self.sql(expression, 'this')}"
        default = "DEFAULT " if expression.args.get("default") else ""
        return f"{default}CHARACTER SET={self.sql(expression, 'this')}"

    def column_sql(self, expression: exp.Column) -> str:
        return ".".join(
            self.sql(part)
            for part in (
                expression.args.get("catalog"),
                expression.args.get("db"),
                expression.args.get("table"),
                expression.args.get("this"),
            )
            if part
        )

    def columnposition_sql(self, expression: exp.ColumnPosition) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        position = self.sql(expression, "position")
        return f"{position}{this}"

    def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
        column = self.sql(expression, "this")
        kind = self.sql(expression, "kind")
        constraints = self.expressions(expression, key="constraints", sep=" ", flat=True)
        exists = "IF NOT EXISTS " if expression.args.get("exists") else ""
        kind = f"{sep}{kind}" if kind else ""
        constraints = f" {constraints}" if constraints else ""
        position = self.sql(expression, "position")
        position = f" {position}" if position else ""

        return f"{exists}{column}{kind}{constraints}{position}"

    def columnconstraint_sql(self, expression: exp.ColumnConstraint) -> str:
        this = self.sql(expression, "this")
        kind_sql = self.sql(expression, "kind").strip()
        return f"CONSTRAINT {this} {kind_sql}" if this else kind_sql

    def autoincrementcolumnconstraint_sql(self, _) -> str:
        return self.token_sql(TokenType.AUTO_INCREMENT)

    def compresscolumnconstraint_sql(self, expression: exp.CompressColumnConstraint) -> str:
        if isinstance(expression.this, list):
            this = self.wrap(self.expressions(expression, key="this", flat=True))
        else:
            this = self.sql(expression, "this")

        return f"COMPRESS {this}"

    def generatedasidentitycolumnconstraint_sql(
        self, expression: exp.GeneratedAsIdentityColumnConstraint
    ) -> str:
        this = ""
        if expression.this is not None:
            on_null = "ON NULL " if expression.args.get("on_null") else ""
            this = " ALWAYS " if expression.this else f" BY DEFAULT {on_null}"

        start = expression.args.get("start")
        start = f"START WITH {start}" if start else ""
        increment = expression.args.get("increment")
        increment = f" INCREMENT BY {increment}" if increment else ""
        minvalue = expression.args.get("minvalue")
        minvalue = f" MINVALUE {minvalue}" if minvalue else ""
        maxvalue = expression.args.get("maxvalue")
        maxvalue = f" MAXVALUE {maxvalue}" if maxvalue else ""
        cycle = expression.args.get("cycle")
        cycle_sql = ""

        if cycle is not None:
            cycle_sql = f"{' NO' if not cycle else ''} CYCLE"
            cycle_sql = cycle_sql.strip() if not start and not increment else cycle_sql

        sequence_opts = ""
        if start or increment or cycle_sql:
            sequence_opts = f"{start}{increment}{minvalue}{maxvalue}{cycle_sql}"
            sequence_opts = f" ({sequence_opts.strip()})"

        expr = self.sql(expression, "expression")
        expr = f"({expr})" if expr else "IDENTITY"

        return f"GENERATED{this}AS {expr}{sequence_opts}"

    def notnullcolumnconstraint_sql(self, expression: exp.NotNullColumnConstraint) -> str:
        return f"{'' if expression.args.get('allow_null') else 'NOT '}NULL"

    def primarykeycolumnconstraint_sql(self, expression: exp.PrimaryKeyColumnConstraint) -> str:
        desc = expression.args.get("desc")
        if desc is not None:
            return f"PRIMARY KEY{' DESC' if desc else ' ASC'}"
        return "PRIMARY KEY"

    def uniquecolumnconstraint_sql(self, expression: exp.UniqueColumnConstraint) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"UNIQUE{this}"

    def create_sql(self, expression: exp.Create) -> str:
        kind = self.sql(expression, "kind").upper()
        properties = expression.args.get("properties")
        properties_exp = expression.copy()
        properties_locs = self.locate_properties(properties) if properties else {}

        if properties_locs.get(exp.Properties.Location.POST_SCHEMA) or properties_locs.get(
            exp.Properties.Location.POST_WITH
        ):
            properties_exp.set(
                "properties",
                exp.Properties(
                    expressions=[
                        *properties_locs[exp.Properties.Location.POST_SCHEMA],
                        *properties_locs[exp.Properties.Location.POST_WITH],
                    ]
                ),
            )

        if kind == "TABLE" and properties_locs.get(exp.Properties.Location.POST_NAME):
            this_name = self.sql(expression.this, "this")
            this_properties = self.properties(
                exp.Properties(expressions=properties_locs[exp.Properties.Location.POST_NAME]),
                wrapped=False,
            )
            this_schema = f"({self.expressions(expression.this)})"
            this = f"{this_name}, {this_properties} {this_schema}"
            properties_sql = ""
        else:
            this = self.sql(expression, "this")
            properties_sql = self.sql(properties_exp, "properties")

        begin = " BEGIN" if expression.args.get("begin") else ""
        expression_sql = self.sql(expression, "expression")
        if expression_sql:
            expression_sql = f"{begin}{self.sep()}{expression_sql}"

            if self.CREATE_FUNCTION_RETURN_AS or not isinstance(expression.expression, exp.Return):
                if properties_locs.get(exp.Properties.Location.POST_ALIAS):
                    postalias_props_sql = self.properties(
                        exp.Properties(
                            expressions=properties_locs[exp.Properties.Location.POST_ALIAS]
                        ),
                        wrapped=False,
                    )
                    expression_sql = f" AS {postalias_props_sql}{expression_sql}"
                else:
                    expression_sql = f" AS{expression_sql}"

        postindex_props_sql = ""
        if properties_locs.get(exp.Properties.Location.POST_INDEX):
            postindex_props_sql = self.properties(
                exp.Properties(expressions=properties_locs[exp.Properties.Location.POST_INDEX]),
                wrapped=False,
                prefix=" ",
            )

        indexes = self.expressions(expression, key="indexes", indent=False, sep=" ")
        indexes = f" {indexes}" if indexes else ""
        index_sql = indexes + postindex_props_sql

        replace = " OR REPLACE" if expression.args.get("replace") else ""
        unique = " UNIQUE" if expression.args.get("unique") else ""

        postcreate_props_sql = ""
        if properties_locs.get(exp.Properties.Location.POST_CREATE):
            postcreate_props_sql = self.properties(
                exp.Properties(expressions=properties_locs[exp.Properties.Location.POST_CREATE]),
                sep=" ",
                prefix=" ",
                wrapped=False,
            )

        modifiers = "".join((replace, unique, postcreate_props_sql))

        postexpression_props_sql = ""
        if properties_locs.get(exp.Properties.Location.POST_EXPRESSION):
            postexpression_props_sql = self.properties(
                exp.Properties(
                    expressions=properties_locs[exp.Properties.Location.POST_EXPRESSION]
                ),
                sep=" ",
                prefix=" ",
                wrapped=False,
            )

        exists_sql = " IF NOT EXISTS" if expression.args.get("exists") else ""
        no_schema_binding = (
            " WITH NO SCHEMA BINDING" if expression.args.get("no_schema_binding") else ""
        )

        clone = self.sql(expression, "clone")
        clone = f" {clone}" if clone else ""

        expression_sql = f"CREATE{modifiers} {kind}{exists_sql} {this}{properties_sql}{expression_sql}{postexpression_props_sql}{index_sql}{no_schema_binding}{clone}"
        return self.prepend_ctes(expression, expression_sql)

    def clone_sql(self, expression: exp.Clone) -> str:
        this = self.sql(expression, "this")
        when = self.sql(expression, "when")

        if when:
            kind = self.sql(expression, "kind")
            expr = self.sql(expression, "expression")
            return f"CLONE {this} {when} ({kind} => {expr})"

        return f"CLONE {this}"

    def describe_sql(self, expression: exp.Describe) -> str:
        return f"DESCRIBE {self.sql(expression, 'this')}"

    def prepend_ctes(self, expression: exp.Expression, sql: str) -> str:
        with_ = self.sql(expression, "with")
        if with_:
            sql = f"{with_}{self.sep()}{sql}"
        return sql

    def with_sql(self, expression: exp.With) -> str:
        sql = self.expressions(expression, flat=True)
        recursive = "RECURSIVE " if expression.args.get("recursive") else ""

        return f"WITH {recursive}{sql}"

    def cte_sql(self, expression: exp.CTE) -> str:
        alias = self.sql(expression, "alias")
        return f"{alias} AS {self.wrap(expression)}"

    def tablealias_sql(self, expression: exp.TableAlias) -> str:
        alias = self.sql(expression, "this")
        columns = self.expressions(expression, key="columns", flat=True)
        columns = f"({columns})" if columns else ""
        return f"{alias}{columns}"

    def bitstring_sql(self, expression: exp.BitString) -> str:
        this = self.sql(expression, "this")
        if self.bit_start:
            return f"{self.bit_start}{this}{self.bit_end}"
        return f"{int(this, 2)}"

    def hexstring_sql(self, expression: exp.HexString) -> str:
        this = self.sql(expression, "this")
        if self.hex_start:
            return f"{self.hex_start}{this}{self.hex_end}"
        return f"{int(this, 16)}"

    def bytestring_sql(self, expression: exp.ByteString) -> str:
        this = self.sql(expression, "this")
        if self.byte_start:
            return f"{self.byte_start}{this}{self.byte_end}"
        return this

    def rawstring_sql(self, expression: exp.RawString) -> str:
        if self.raw_start:
            return f"{self.raw_start}{expression.name}{self.raw_end}"
        return self.sql(exp.Literal.string(expression.name.replace("\\", "\\\\")))

    def datatypesize_sql(self, expression: exp.DataTypeSize) -> str:
        this = self.sql(expression, "this")
        specifier = self.sql(expression, "expression")
        specifier = f" {specifier}" if specifier else ""
        return f"{this}{specifier}"

    def datatype_sql(self, expression: exp.DataType) -> str:
        type_value = expression.this
        type_sql = self.TYPE_MAPPING.get(type_value, type_value.value)
        nested = ""
        interior = self.expressions(expression, flat=True)
        values = ""
        if interior:
            if expression.args.get("nested"):
                nested = f"{self.STRUCT_DELIMITER[0]}{interior}{self.STRUCT_DELIMITER[1]}"
                if expression.args.get("values") is not None:
                    delimiters = ("[", "]") if type_value == exp.DataType.Type.ARRAY else ("(", ")")
                    values = self.expressions(expression, key="values", flat=True)
                    values = f"{delimiters[0]}{values}{delimiters[1]}"
            else:
                nested = f"({interior})"

        return f"{type_sql}{nested}{values}"

    def directory_sql(self, expression: exp.Directory) -> str:
        local = "LOCAL " if expression.args.get("local") else ""
        row_format = self.sql(expression, "row_format")
        row_format = f" {row_format}" if row_format else ""
        return f"{local}DIRECTORY {self.sql(expression, 'this')}{row_format}"

    def delete_sql(self, expression: exp.Delete) -> str:
        this = self.sql(expression, "this")
        this = f" FROM {this}" if this else ""
        using_sql = (
            f" USING {self.expressions(expression, key='using', sep=', USING ')}"
            if expression.args.get("using")
            else ""
        )
        where_sql = self.sql(expression, "where")
        returning = self.sql(expression, "returning")
        sql = f"DELETE{this}{using_sql}{where_sql}{returning}"
        return self.prepend_ctes(expression, sql)

    def drop_sql(self, expression: exp.Drop) -> str:
        this = self.sql(expression, "this")
        kind = expression.args["kind"]
        exists_sql = " IF EXISTS " if expression.args.get("exists") else " "
        temporary = " TEMPORARY" if expression.args.get("temporary") else ""
        materialized = " MATERIALIZED" if expression.args.get("materialized") else ""
        cascade = " CASCADE" if expression.args.get("cascade") else ""
        constraints = " CONSTRAINTS" if expression.args.get("constraints") else ""
        purge = " PURGE" if expression.args.get("purge") else ""
        return (
            f"DROP{temporary}{materialized} {kind}{exists_sql}{this}{cascade}{constraints}{purge}"
        )

    def except_sql(self, expression: exp.Except) -> str:
        return self.prepend_ctes(
            expression,
            self.set_operation(expression, self.except_op(expression)),
        )

    def except_op(self, expression: exp.Except) -> str:
        return f"EXCEPT{'' if expression.args.get('distinct') else ' ALL'}"

    def fetch_sql(self, expression: exp.Fetch) -> str:
        direction = expression.args.get("direction")
        direction = f" {direction.upper()}" if direction else ""
        count = expression.args.get("count")
        count = f" {count}" if count else ""
        if expression.args.get("percent"):
            count = f"{count} PERCENT"
        with_ties_or_only = "WITH TIES" if expression.args.get("with_ties") else "ONLY"
        return f"{self.seg('FETCH')}{direction}{count} ROWS {with_ties_or_only}"

    def filter_sql(self, expression: exp.Filter) -> str:
        this = self.sql(expression, "this")
        where = self.sql(expression, "expression")[1:]  # where has a leading space
        return f"{this} FILTER({where})"

    def hint_sql(self, expression: exp.Hint) -> str:
        if self.sql(expression, "this"):
            self.unsupported("Hints are not supported")
        return ""

    def index_sql(self, expression: exp.Index) -> str:
        unique = "UNIQUE " if expression.args.get("unique") else ""
        primary = "PRIMARY " if expression.args.get("primary") else ""
        amp = "AMP " if expression.args.get("amp") else ""
        name = f"{expression.name} " if expression.name else ""
        table = self.sql(expression, "table")
        table = f"{self.INDEX_ON} {table} " if table else ""
        index = "INDEX " if not table else ""
        columns = self.expressions(expression, key="columns", flat=True)
        partition_by = self.expressions(expression, key="partition_by", flat=True)
        partition_by = f" PARTITION BY {partition_by}" if partition_by else ""
        return f"{unique}{primary}{amp}{index}{name}{table}({columns}){partition_by}"

    def identifier_sql(self, expression: exp.Identifier) -> str:
        text = expression.name
        lower = text.lower()
        text = lower if self.normalize and not expression.quoted else text
        text = text.replace(self.identifier_end, self._escaped_identifier_end)
        if (
            expression.quoted
            or should_identify(text, self.identify)
            or lower in self.RESERVED_KEYWORDS
        ):
            text = f"{self.identifier_start}{text}{self.identifier_end}"
        return text
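
    # Quoting sketch, assuming the defaults documented in the class docstring: with identify=True
    # every identifier is wrapped in identifier_start/identifier_end (e.g. col becomes "col"),
    # identify='safe' only quotes identifiers that should_identify() flags, and identifiers that
    # are already quoted or appear in RESERVED_KEYWORDS are always delimited.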
2025-02-13 15:52:09 +01:00
|
|
|
def inputoutputformat_sql(self, expression: exp.InputOutputFormat) -> str:
|
|
|
|
input_format = self.sql(expression, "input_format")
|
|
|
|
input_format = f"INPUTFORMAT {input_format}" if input_format else ""
|
|
|
|
output_format = self.sql(expression, "output_format")
|
|
|
|
output_format = f"OUTPUTFORMAT {output_format}" if output_format else ""
|
|
|
|
return self.sep().join((input_format, output_format))
|
|
|
|
|
2025-02-13 15:57:23 +01:00
|
|
|
def national_sql(self, expression: exp.National, prefix: str = "N") -> str:
|
|
|
|
string = self.sql(exp.Literal.string(expression.name))
|
|
|
|
return f"{prefix}{string}"
|
2025-02-13 15:01:55 +01:00
|
|
|
|
2025-02-13 14:58:37 +01:00
|
|
|
def partition_sql(self, expression: exp.Partition) -> str:
|
2025-02-13 15:08:15 +01:00
|
|
|
return f"PARTITION({self.expressions(expression)})"
|
2025-02-13 06:15:54 +01:00
|
|
|
|
2025-02-13 14:58:37 +01:00
|
|
|
def properties_sql(self, expression: exp.Properties) -> str:
|
2025-02-13 06:15:54 +01:00
|
|
|
root_properties = []
|
|
|
|
with_properties = []
|
|
|
|
|
|
|
|
for p in expression.expressions:
|
2025-02-13 15:09:58 +01:00
|
|
|
p_loc = self.PROPERTIES_LOCATION[p.__class__]
|
2025-02-13 15:26:26 +01:00
|
|
|
if p_loc == exp.Properties.Location.POST_WITH:
|
2025-02-13 06:15:54 +01:00
|
|
|
with_properties.append(p)
|
2025-02-13 15:26:26 +01:00
|
|
|
elif p_loc == exp.Properties.Location.POST_SCHEMA:
|
2025-02-13 14:40:43 +01:00
|
|
|
root_properties.append(p)
|
2025-02-13 06:15:54 +01:00
|
|
|
|
2025-02-13 15:09:58 +01:00
|
|
|
return self.root_properties(
|
|
|
|
exp.Properties(expressions=root_properties)
|
|
|
|
) + self.with_properties(exp.Properties(expressions=with_properties))
|
2025-02-13 06:15:54 +01:00
|
|
|
|
2025-02-13 14:58:37 +01:00
|
|
|
def root_properties(self, properties: exp.Properties) -> str:
|
2025-02-13 06:15:54 +01:00
|
|
|
if properties.expressions:
|
2025-02-13 14:56:25 +01:00
|
|
|
return self.sep() + self.expressions(properties, indent=False, sep=" ")
|
2025-02-13 06:15:54 +01:00
|
|
|
return ""
|
|
|
|
|
2025-02-13 15:05:06 +01:00
|
|
|
def properties(
|
2025-02-13 15:08:15 +01:00
|
|
|
self,
|
|
|
|
properties: exp.Properties,
|
|
|
|
prefix: str = "",
|
|
|
|
sep: str = ", ",
|
|
|
|
suffix: str = "",
|
2025-02-13 15:09:58 +01:00
|
|
|
wrapped: bool = True,
|
2025-02-13 15:05:06 +01:00
|
|
|
) -> str:
|
2025-02-13 06:15:54 +01:00
|
|
|
if properties.expressions:
|
2025-02-13 14:56:25 +01:00
|
|
|
expressions = self.expressions(properties, sep=sep, indent=False)
|
2025-02-13 15:09:58 +01:00
|
|
|
expressions = self.wrap(expressions) if wrapped else expressions
|
2025-02-13 15:08:15 +01:00
|
|
|
return f"{prefix}{' ' if prefix and prefix != ' ' else ''}{expressions}{suffix}"
|
2025-02-13 06:15:54 +01:00
|
|
|
return ""
|
|
|
|
|
2025-02-13 14:58:37 +01:00
|
|
|
def with_properties(self, properties: exp.Properties) -> str:
|
|
|
|
return self.properties(properties, prefix=self.seg("WITH"))
|
2025-02-13 06:15:54 +01:00
|
|
|
|
2025-02-13 15:09:58 +01:00
|
|
|
def locate_properties(
|
|
|
|
self, properties: exp.Properties
|
|
|
|
) -> t.Dict[exp.Properties.Location, list[exp.Property]]:
|
|
|
|
properties_locs: t.Dict[exp.Properties.Location, list[exp.Property]] = {
|
|
|
|
key: [] for key in exp.Properties.Location
|
|
|
|
}
|
|
|
|
|
|
|
|
for p in properties.expressions:
|
|
|
|
p_loc = self.PROPERTIES_LOCATION[p.__class__]
|
2025-02-13 15:26:26 +01:00
|
|
|
if p_loc == exp.Properties.Location.POST_NAME:
|
|
|
|
properties_locs[exp.Properties.Location.POST_NAME].append(p)
|
2025-02-13 15:09:58 +01:00
|
|
|
elif p_loc == exp.Properties.Location.POST_INDEX:
|
|
|
|
properties_locs[exp.Properties.Location.POST_INDEX].append(p)
|
2025-02-13 15:26:26 +01:00
|
|
|
elif p_loc == exp.Properties.Location.POST_SCHEMA:
|
|
|
|
properties_locs[exp.Properties.Location.POST_SCHEMA].append(p)
|
|
|
|
elif p_loc == exp.Properties.Location.POST_WITH:
|
|
|
|
properties_locs[exp.Properties.Location.POST_WITH].append(p)
|
2025-02-13 15:09:58 +01:00
|
|
|
elif p_loc == exp.Properties.Location.POST_CREATE:
|
|
|
|
properties_locs[exp.Properties.Location.POST_CREATE].append(p)
|
2025-02-13 15:26:26 +01:00
|
|
|
elif p_loc == exp.Properties.Location.POST_ALIAS:
|
|
|
|
properties_locs[exp.Properties.Location.POST_ALIAS].append(p)
|
2025-02-13 15:40:23 +01:00
|
|
|
elif p_loc == exp.Properties.Location.POST_EXPRESSION:
|
|
|
|
properties_locs[exp.Properties.Location.POST_EXPRESSION].append(p)
|
2025-02-13 15:09:58 +01:00
|
|
|
elif p_loc == exp.Properties.Location.UNSUPPORTED:
|
|
|
|
self.unsupported(f"Unsupported property {p.key}")
|
|
|
|
|
|
|
|
return properties_locs
|
|
|
|
|
2025-02-13 14:58:37 +01:00
|
|
|
def property_sql(self, expression: exp.Property) -> str:
|
2025-02-13 14:56:25 +01:00
|
|
|
        property_cls = expression.__class__
        if property_cls == exp.Property:
            return f"{expression.name}={self.sql(expression, 'value')}"

        property_name = exp.Properties.PROPERTY_TO_NAME.get(property_cls)
        if not property_name:
            self.unsupported(f"Unsupported property {expression.key}")

        return f"{property_name}={self.sql(expression, 'this')}"

    def likeproperty_sql(self, expression: exp.LikeProperty) -> str:
        options = " ".join(f"{e.name} {self.sql(e, 'value')}" for e in expression.expressions)
        options = f" {options}" if options else ""
        return f"LIKE {self.sql(expression, 'this')}{options}"

    def fallbackproperty_sql(self, expression: exp.FallbackProperty) -> str:
        no = "NO " if expression.args.get("no") else ""
        protection = " PROTECTION" if expression.args.get("protection") else ""
        return f"{no}FALLBACK{protection}"

    def journalproperty_sql(self, expression: exp.JournalProperty) -> str:
        no = "NO " if expression.args.get("no") else ""
        local = expression.args.get("local")
        local = f"{local} " if local else ""
        dual = "DUAL " if expression.args.get("dual") else ""
        before = "BEFORE " if expression.args.get("before") else ""
        after = "AFTER " if expression.args.get("after") else ""
        return f"{no}{local}{dual}{before}{after}JOURNAL"

    def freespaceproperty_sql(self, expression: exp.FreespaceProperty) -> str:
        freespace = self.sql(expression, "this")
        percent = " PERCENT" if expression.args.get("percent") else ""
        return f"FREESPACE={freespace}{percent}"

    def checksumproperty_sql(self, expression: exp.ChecksumProperty) -> str:
        if expression.args.get("default"):
            property = "DEFAULT"
        elif expression.args.get("on"):
            property = "ON"
        else:
            property = "OFF"
        return f"CHECKSUM={property}"

    def mergeblockratioproperty_sql(self, expression: exp.MergeBlockRatioProperty) -> str:
        if expression.args.get("no"):
            return "NO MERGEBLOCKRATIO"
        if expression.args.get("default"):
            return "DEFAULT MERGEBLOCKRATIO"

        percent = " PERCENT" if expression.args.get("percent") else ""
        return f"MERGEBLOCKRATIO={self.sql(expression, 'this')}{percent}"

    def datablocksizeproperty_sql(self, expression: exp.DataBlocksizeProperty) -> str:
        default = expression.args.get("default")
        minimum = expression.args.get("minimum")
        maximum = expression.args.get("maximum")
        if default or minimum or maximum:
            if default:
                prop = "DEFAULT"
            elif minimum:
                prop = "MINIMUM"
            else:
                prop = "MAXIMUM"
            return f"{prop} DATABLOCKSIZE"

        units = expression.args.get("units")
        units = f" {units}" if units else ""
        return f"DATABLOCKSIZE={self.sql(expression, 'size')}{units}"

    def blockcompressionproperty_sql(self, expression: exp.BlockCompressionProperty) -> str:
        autotemp = expression.args.get("autotemp")
        always = expression.args.get("always")
        default = expression.args.get("default")
        manual = expression.args.get("manual")
        never = expression.args.get("never")

        if autotemp is not None:
            prop = f"AUTOTEMP({self.expressions(autotemp)})"
        elif always:
            prop = "ALWAYS"
        elif default:
            prop = "DEFAULT"
        elif manual:
            prop = "MANUAL"
        elif never:
            prop = "NEVER"
        return f"BLOCKCOMPRESSION={prop}"

    def isolatedloadingproperty_sql(self, expression: exp.IsolatedLoadingProperty) -> str:
        no = expression.args.get("no")
        no = " NO" if no else ""
        concurrent = expression.args.get("concurrent")
        concurrent = " CONCURRENT" if concurrent else ""

        for_ = ""
        if expression.args.get("for_all"):
            for_ = " FOR ALL"
        elif expression.args.get("for_insert"):
            for_ = " FOR INSERT"
        elif expression.args.get("for_none"):
            for_ = " FOR NONE"
        return f"WITH{no}{concurrent} ISOLATED LOADING{for_}"

    def lockingproperty_sql(self, expression: exp.LockingProperty) -> str:
        kind = expression.args.get("kind")
        this: str = f" {self.sql(expression, 'this')}" if expression.this else ""
        for_or_in = expression.args.get("for_or_in")
        lock_type = expression.args.get("lock_type")
        override = " OVERRIDE" if expression.args.get("override") else ""
        return f"LOCKING {kind}{this} {for_or_in} {lock_type}{override}"

    def withdataproperty_sql(self, expression: exp.WithDataProperty) -> str:
        data_sql = f"WITH {'NO ' if expression.args.get('no') else ''}DATA"
        statistics = expression.args.get("statistics")
        statistics_sql = ""
        if statistics is not None:
            statistics_sql = f" AND {'NO ' if not statistics else ''}STATISTICS"
        return f"{data_sql}{statistics_sql}"
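
    # The INSERT generator below stitches together the optional pieces of an INSERT
    # statement (OVERWRITE vs. INTO, OR REPLACE/IGNORE alternatives, IF EXISTS,
    # PARTITION, ON CONFLICT, RETURNING). Illustrative round trip, assuming the
    # default dialect:
    #
    #   import sqlglot
    #   sqlglot.transpile("INSERT INTO t (a) VALUES (1)")[0]
    #   # -> 'INSERT INTO t (a) VALUES (1)'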
    def insert_sql(self, expression: exp.Insert) -> str:
        overwrite = expression.args.get("overwrite")

        if isinstance(expression.this, exp.Directory):
            this = "OVERWRITE " if overwrite else "INTO "
        else:
            this = "OVERWRITE TABLE " if overwrite else "INTO "

        alternative = expression.args.get("alternative")
        alternative = f" OR {alternative} " if alternative else " "
        this = f"{this}{self.sql(expression, 'this')}"

        exists = " IF EXISTS " if expression.args.get("exists") else " "
        partition_sql = (
            self.sql(expression, "partition") if expression.args.get("partition") else ""
        )
        expression_sql = self.sql(expression, "expression")
        conflict = self.sql(expression, "conflict")
        returning = self.sql(expression, "returning")
        sep = self.sep() if partition_sql else ""
        sql = f"INSERT{alternative}{this}{exists}{partition_sql}{sep}{expression_sql}{conflict}{returning}"
        return self.prepend_ctes(expression, sql)

    def intersect_sql(self, expression: exp.Intersect) -> str:
        return self.prepend_ctes(
            expression,
            self.set_operation(expression, self.intersect_op(expression)),
        )

    def intersect_op(self, expression: exp.Intersect) -> str:
        return f"INTERSECT{'' if expression.args.get('distinct') else ' ALL'}"

    def introducer_sql(self, expression: exp.Introducer) -> str:
        return f"{self.sql(expression, 'this')} {self.sql(expression, 'expression')}"

    def pseudotype_sql(self, expression: exp.PseudoType) -> str:
        return expression.name.upper()

    def onconflict_sql(self, expression: exp.OnConflict) -> str:
        conflict = "ON DUPLICATE KEY" if expression.args.get("duplicate") else "ON CONFLICT"
        constraint = self.sql(expression, "constraint")
        if constraint:
            constraint = f"ON CONSTRAINT {constraint}"
        key = self.expressions(expression, key="key", flat=True)
        do = "" if expression.args.get("duplicate") else " DO "
        nothing = "NOTHING" if expression.args.get("nothing") else ""
        expressions = self.expressions(expression, flat=True)
        if expressions:
            expressions = f"UPDATE SET {expressions}"
        return f"{self.seg(conflict)} {constraint}{key}{do}{nothing}{expressions}"

    def returning_sql(self, expression: exp.Returning) -> str:
        return f"{self.seg('RETURNING')} {self.expressions(expression, flat=True)}"

    def rowformatdelimitedproperty_sql(self, expression: exp.RowFormatDelimitedProperty) -> str:
        fields = expression.args.get("fields")
        fields = f" FIELDS TERMINATED BY {fields}" if fields else ""
        escaped = expression.args.get("escaped")
        escaped = f" ESCAPED BY {escaped}" if escaped else ""
        items = expression.args.get("collection_items")
        items = f" COLLECTION ITEMS TERMINATED BY {items}" if items else ""
        keys = expression.args.get("map_keys")
        keys = f" MAP KEYS TERMINATED BY {keys}" if keys else ""
        lines = expression.args.get("lines")
        lines = f" LINES TERMINATED BY {lines}" if lines else ""
        null = expression.args.get("null")
        null = f" NULL DEFINED AS {null}" if null else ""
        return f"ROW FORMAT DELIMITED{fields}{escaped}{items}{keys}{lines}{null}"

    def table_sql(self, expression: exp.Table, sep: str = " AS ") -> str:
        table = ".".join(
            part
            for part in [
                self.sql(expression, "catalog"),
                self.sql(expression, "db"),
                self.sql(expression, "this"),
            ]
            if part
        )

        alias = self.sql(expression, "alias")
        alias = f"{sep}{alias}" if alias else ""
        hints = self.expressions(expression, key="hints", flat=True)
        hints = f" WITH ({hints})" if hints and self.TABLE_HINTS else ""
        pivots = self.expressions(expression, key="pivots", sep=" ", flat=True)
        pivots = f" {pivots}" if pivots else ""
        joins = self.expressions(expression, key="joins", sep="")
        laterals = self.expressions(expression, key="laterals", sep="")
        system_time = expression.args.get("system_time")
        system_time = f" {self.sql(expression, 'system_time')}" if system_time else ""

        return f"{table}{system_time}{alias}{hints}{pivots}{joins}{laterals}"
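
    # TABLESAMPLE rendering is dialect dependent: alias_post_tablesample moves the
    # table alias after the sample clause, TABLESAMPLE_WITH_METHOD controls whether a
    # sampling method (e.g. BERNOULLI) is emitted, and TABLESAMPLE_SIZE_IS_PERCENT
    # reinterprets a bare size as a percentage. The method below only reorders these
    # pieces; it does not validate them.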
    def tablesample_sql(
        self, expression: exp.TableSample, seed_prefix: str = "SEED", sep=" AS "
    ) -> str:
        if self.alias_post_tablesample and expression.this.alias:
            table = expression.this.copy()
            table.set("alias", None)
            this = self.sql(table)
            alias = f"{sep}{self.sql(expression.this, 'alias')}"
        else:
            this = self.sql(expression, "this")
            alias = ""
        method = self.sql(expression, "method")
        method = f"{method.upper()} " if method and self.TABLESAMPLE_WITH_METHOD else ""
        numerator = self.sql(expression, "bucket_numerator")
        denominator = self.sql(expression, "bucket_denominator")
        field = self.sql(expression, "bucket_field")
        field = f" ON {field}" if field else ""
        bucket = f"BUCKET {numerator} OUT OF {denominator}{field}" if numerator else ""
        percent = self.sql(expression, "percent")
        percent = f"{percent} PERCENT" if percent else ""
        rows = self.sql(expression, "rows")
        rows = f"{rows} ROWS" if rows else ""
        size = self.sql(expression, "size")
        if size and self.TABLESAMPLE_SIZE_IS_PERCENT:
            size = f"{size} PERCENT"
        seed = self.sql(expression, "seed")
        seed = f" {seed_prefix} ({seed})" if seed else ""
        kind = expression.args.get("kind", "TABLESAMPLE")
        return f"{this} {kind} {method}({bucket}{percent}{rows}{size}){seed}{alias}"

    def pivot_sql(self, expression: exp.Pivot) -> str:
        expressions = self.expressions(expression, flat=True)

        if expression.this:
            this = self.sql(expression, "this")
            on = f"{self.seg('ON')} {expressions}"
            using = self.expressions(expression, key="using", flat=True)
            using = f"{self.seg('USING')} {using}" if using else ""
            group = self.sql(expression, "group")
            return f"PIVOT {this}{on}{using}{group}"

        alias = self.sql(expression, "alias")
        alias = f" AS {alias}" if alias else ""
        unpivot = expression.args.get("unpivot")
        direction = "UNPIVOT" if unpivot else "PIVOT"
        field = self.sql(expression, "field")
        return f"{direction}({expressions} FOR {field}){alias}"

    def tuple_sql(self, expression: exp.Tuple) -> str:
        return f"({self.expressions(expression, flat=True)})"

    def update_sql(self, expression: exp.Update) -> str:
        this = self.sql(expression, "this")
        set_sql = self.expressions(expression, flat=True)
        from_sql = self.sql(expression, "from")
        where_sql = self.sql(expression, "where")
        returning = self.sql(expression, "returning")
        sql = f"UPDATE {this} SET {set_sql}{from_sql}{where_sql}{returning}"
        return self.prepend_ctes(expression, sql)
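
    # VALUES lists are parenthesized when WRAP_DERIVED_VALUES is set and the clause is
    # aliased or appears directly under a FROM, which is the shape most dialects expect
    # for derived tables; otherwise the bare "VALUES ..." form is kept.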
    def values_sql(self, expression: exp.Values) -> str:
        args = self.expressions(expression)
        alias = self.sql(expression, "alias")
        values = f"VALUES{self.seg('')}{args}"
        values = (
            f"({values})"
            if self.WRAP_DERIVED_VALUES and (alias or isinstance(expression.parent, exp.From))
            else values
        )
        return f"{values} AS {alias}" if alias else values

    def var_sql(self, expression: exp.Var) -> str:
        return self.sql(expression, "this")

    def into_sql(self, expression: exp.Into) -> str:
        temporary = " TEMPORARY" if expression.args.get("temporary") else ""
        unlogged = " UNLOGGED" if expression.args.get("unlogged") else ""
        return f"{self.seg('INTO')}{temporary or unlogged} {self.sql(expression, 'this')}"

    def from_sql(self, expression: exp.From) -> str:
        return f"{self.seg('FROM')} {self.sql(expression, 'this')}"
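
    # GROUP BY handling: a bare boolean in the "cube"/"rollup" args renders the
    # MySQL-style "WITH CUBE"/"WITH ROLLUP" suffix and returns early, while lists
    # render the standard CUBE (...) / ROLLUP (...) / GROUPING SETS (...) forms,
    # joined with GROUPINGS_SEP. A sketch, assuming the default dialect:
    #
    #   import sqlglot
    #   sqlglot.transpile("SELECT a, SUM(b) FROM t GROUP BY ROLLUP (a)")[0]
    #   # keeps the ROLLUP grouping intact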
    def group_sql(self, expression: exp.Group) -> str:
        group_by = self.op_expressions("GROUP BY", expression)
        grouping_sets = self.expressions(expression, key="grouping_sets", indent=False)
        grouping_sets = (
            f"{self.seg('GROUPING SETS')} {self.wrap(grouping_sets)}" if grouping_sets else ""
        )

        cube = expression.args.get("cube", [])
        if seq_get(cube, 0) is True:
            return f"{group_by}{self.seg('WITH CUBE')}"
        else:
            cube_sql = self.expressions(expression, key="cube", indent=False)
            cube_sql = f"{self.seg('CUBE')} {self.wrap(cube_sql)}" if cube_sql else ""

        rollup = expression.args.get("rollup", [])
        if seq_get(rollup, 0) is True:
            return f"{group_by}{self.seg('WITH ROLLUP')}"
        else:
            rollup_sql = self.expressions(expression, key="rollup", indent=False)
            rollup_sql = f"{self.seg('ROLLUP')} {self.wrap(rollup_sql)}" if rollup_sql else ""

        groupings = csv(
            grouping_sets,
            cube_sql,
            rollup_sql,
            self.seg("WITH TOTALS") if expression.args.get("totals") else "",
            sep=self.GROUPINGS_SEP,
        )

        if expression.args.get("expressions") and groupings:
            group_by = f"{group_by}{self.GROUPINGS_SEP}"

        return f"{group_by}{groupings}"

    def having_sql(self, expression: exp.Having) -> str:
        this = self.indent(self.sql(expression, "this"))
        return f"{self.seg('HAVING')}{self.sep()}{this}"
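
    # Joins are emitted as "<NATURAL> <GLOBAL> <side> <kind> [hint] JOIN <table>" plus
    # an ON or USING clause; when there is neither a condition nor any join operator,
    # the join falls back to the comma-separated form ", <table>".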
    def join_sql(self, expression: exp.Join) -> str:
        op_sql = " ".join(
            op
            for op in (
                "NATURAL" if expression.args.get("natural") else None,
                "GLOBAL" if expression.args.get("global") else None,
                expression.side,
                expression.kind,
                expression.hint if self.JOIN_HINTS else None,
            )
            if op
        )
        on_sql = self.sql(expression, "on")
        using = expression.args.get("using")

        if not on_sql and using:
            on_sql = csv(*(self.sql(column) for column in using))

        this_sql = self.sql(expression, "this")

        if on_sql:
            on_sql = self.indent(on_sql, skip_first=True)
            space = self.seg(" " * self.pad) if self.pretty else " "
            if using:
                on_sql = f"{space}USING ({on_sql})"
            else:
                on_sql = f"{space}ON {on_sql}"
        elif not op_sql:
            return f", {this_sql}"

        op_sql = f"{op_sql} JOIN" if op_sql else "JOIN"
        return f"{self.seg(op_sql)} {this_sql}{on_sql}"

    def lambda_sql(self, expression: exp.Lambda, arrow_sep: str = "->") -> str:
        args = self.expressions(expression, flat=True)
        args = f"({args})" if len(args.split(",")) > 1 else args
        return f"{args} {arrow_sep} {self.sql(expression, 'this')}"

    def lateral_sql(self, expression: exp.Lateral) -> str:
        this = self.sql(expression, "this")

        if isinstance(expression.this, exp.Subquery):
            return f"LATERAL {this}"

        if expression.args.get("view"):
            alias = expression.args["alias"]
            columns = self.expressions(alias, key="columns", flat=True)
            table = f" {alias.name}" if alias.name else ""
            columns = f" AS {columns}" if columns else ""
            op_sql = self.seg(f"LATERAL VIEW{' OUTER' if expression.args.get('outer') else ''}")
            return f"{op_sql}{self.sep()}{this}{table}{columns}"

        alias = self.sql(expression, "alias")
        alias = f" AS {alias}" if alias else ""
        return f"LATERAL {this}{alias}"

    def limit_sql(self, expression: exp.Limit) -> str:
        this = self.sql(expression, "this")
        return f"{this}{self.seg('LIMIT')} {self.sql(expression, 'expression')}"

    def offset_sql(self, expression: exp.Offset) -> str:
        this = self.sql(expression, "this")
        return f"{this}{self.seg('OFFSET')} {self.sql(expression, 'expression')}"

    def setitem_sql(self, expression: exp.SetItem) -> str:
        kind = self.sql(expression, "kind")
        kind = f"{kind} " if kind else ""
        this = self.sql(expression, "this")
        expressions = self.expressions(expression)
        collate = self.sql(expression, "collate")
        collate = f" COLLATE {collate}" if collate else ""
        global_ = "GLOBAL " if expression.args.get("global") else ""
        return f"{global_}{kind}{this}{expressions}{collate}"

    def set_sql(self, expression: exp.Set) -> str:
        expressions = (
            f" {self.expressions(expression, flat=True)}" if expression.expressions else ""
        )
        return f"SET{expressions}"

    def pragma_sql(self, expression: exp.Pragma) -> str:
        return f"PRAGMA {self.sql(expression, 'this')}"
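
    # Locking reads (FOR UPDATE / FOR SHARE, optionally OF <tables> and WAIT n /
    # NOWAIT / SKIP LOCKED) are only emitted when the dialect sets
    # LOCKING_READS_SUPPORTED; otherwise the clause is dropped with a warning via
    # self.unsupported().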
    def lock_sql(self, expression: exp.Lock) -> str:
        if not self.LOCKING_READS_SUPPORTED:
            self.unsupported("Locking reads using 'FOR UPDATE/SHARE' are not supported")
            return ""

        lock_type = "FOR UPDATE" if expression.args["update"] else "FOR SHARE"
        expressions = self.expressions(expression, flat=True)
        expressions = f" OF {expressions}" if expressions else ""
        wait = expression.args.get("wait")

        if wait is not None:
            if isinstance(wait, exp.Literal):
                wait = f" WAIT {self.sql(wait)}"
            else:
                wait = " NOWAIT" if wait else " SKIP LOCKED"

        return f"{lock_type}{expressions}{wait or ''}"
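
    # String literals are re-quoted with the dialect's quote characters, embedded
    # closing quotes are escaped, and in pretty mode newlines are swapped for
    # SENTINEL_LINE_BREAK so that indentation logic does not reflow them.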
    def literal_sql(self, expression: exp.Literal) -> str:
        text = expression.this or ""
        if expression.is_string:
            text = text.replace(self.quote_end, self._escaped_quote_end)
            if self.pretty:
                text = text.replace("\n", self.SENTINEL_LINE_BREAK)
            text = f"{self.quote_start}{text}{self.quote_end}"
        return text

    def loaddata_sql(self, expression: exp.LoadData) -> str:
        local = " LOCAL" if expression.args.get("local") else ""
        inpath = f" INPATH {self.sql(expression, 'inpath')}"
        overwrite = " OVERWRITE" if expression.args.get("overwrite") else ""
        this = f" INTO TABLE {self.sql(expression, 'this')}"
        partition = self.sql(expression, "partition")
        partition = f" {partition}" if partition else ""
        input_format = self.sql(expression, "input_format")
        input_format = f" INPUTFORMAT {input_format}" if input_format else ""
        serde = self.sql(expression, "serde")
        serde = f" SERDE {serde}" if serde else ""
        return f"LOAD DATA{local}{inpath}{overwrite}{this}{partition}{input_format}{serde}"

    def null_sql(self, *_) -> str:
        return "NULL"

    def boolean_sql(self, expression: exp.Boolean) -> str:
        return "TRUE" if expression.this else "FALSE"

    def order_sql(self, expression: exp.Order, flat: bool = False) -> str:
        this = self.sql(expression, "this")
        this = f"{this} " if this else this
        return self.op_expressions(f"{this}ORDER BY", expression, flat=this or flat)  # type: ignore

    def cluster_sql(self, expression: exp.Cluster) -> str:
        return self.op_expressions("CLUSTER BY", expression)

    def distribute_sql(self, expression: exp.Distribute) -> str:
        return self.op_expressions("DISTRIBUTE BY", expression)

    def sort_sql(self, expression: exp.Sort) -> str:
        return self.op_expressions("SORT BY", expression)
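
    # ORDER BY terms only get an explicit NULLS FIRST / NULLS LAST when the requested
    # placement differs from the dialect's default null ordering (nulls_are_large,
    # nulls_are_small or nulls_are_last); dialects without NULL_ORDERING_SUPPORTED drop
    # the modifier and report it through self.unsupported() instead.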
    def ordered_sql(self, expression: exp.Ordered) -> str:
        desc = expression.args.get("desc")
        asc = not desc

        nulls_first = expression.args.get("nulls_first")
        nulls_last = not nulls_first
        nulls_are_large = self.null_ordering == "nulls_are_large"
        nulls_are_small = self.null_ordering == "nulls_are_small"
        nulls_are_last = self.null_ordering == "nulls_are_last"

        sort_order = " DESC" if desc else ""
        nulls_sort_change = ""
        if nulls_first and (
            (asc and nulls_are_large) or (desc and nulls_are_small) or nulls_are_last
        ):
            nulls_sort_change = " NULLS FIRST"
        elif (
            nulls_last
            and ((asc and nulls_are_small) or (desc and nulls_are_large))
            and not nulls_are_last
        ):
            nulls_sort_change = " NULLS LAST"

        if nulls_sort_change and not self.NULL_ORDERING_SUPPORTED:
            self.unsupported(
                "Sorting in an ORDER BY on NULLS FIRST/NULLS LAST is not supported by this dialect"
            )
            nulls_sort_change = ""

        return f"{self.sql(expression, 'this')}{sort_order}{nulls_sort_change}"

    def matchrecognize_sql(self, expression: exp.MatchRecognize) -> str:
        partition = self.partition_by_sql(expression)
        order = self.sql(expression, "order")
        measures = self.expressions(expression, key="measures")
        measures = self.seg(f"MEASURES{self.seg(measures)}") if measures else ""
        rows = self.sql(expression, "rows")
        rows = self.seg(rows) if rows else ""
        after = self.sql(expression, "after")
        after = self.seg(after) if after else ""
        pattern = self.sql(expression, "pattern")
        pattern = self.seg(f"PATTERN ({pattern})") if pattern else ""
        definition_sqls = [
            f"{self.sql(definition, 'alias')} AS {self.sql(definition, 'this')}"
            for definition in expression.args.get("define", [])
        ]
        definitions = self.expressions(sqls=definition_sqls)
        define = self.seg(f"DEFINE{self.seg(definitions)}") if definitions else ""
        body = "".join(
            (
                partition,
                order,
                measures,
                rows,
                after,
                pattern,
                define,
            )
        )
        alias = self.sql(expression, "alias")
        alias = f" {alias}" if alias else ""
        return f"{self.seg('MATCH_RECOGNIZE')} {self.wrap(body)}{alias}"
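
    # query_modifiers appends the trailing clauses of a SELECT-like node in a fixed
    # order: joins, MATCH_RECOGNIZE, laterals, WHERE, GROUP BY, HAVING, QUALIFY/WINDOW,
    # ORDER BY, then LIMIT/OFFSET (or OFFSET/FETCH), then locks and TABLESAMPLE.
    # LIMIT_FETCH lets a dialect translate between LIMIT and FETCH FIRST on the fly.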
    def query_modifiers(self, expression: exp.Expression, *sqls: str) -> str:
        limit = expression.args.get("limit")

        if self.LIMIT_FETCH == "LIMIT" and isinstance(limit, exp.Fetch):
            limit = exp.Limit(expression=limit.args.get("count"))
        elif self.LIMIT_FETCH == "FETCH" and isinstance(limit, exp.Limit):
            limit = exp.Fetch(direction="FIRST", count=limit.expression)

        fetch = isinstance(limit, exp.Fetch)

        return csv(
            *sqls,
            *[self.sql(join) for join in expression.args.get("joins") or []],
            self.sql(expression, "match"),
            *[self.sql(lateral) for lateral in expression.args.get("laterals") or []],
            self.sql(expression, "where"),
            self.sql(expression, "group"),
            self.sql(expression, "having"),
            *self.after_having_modifiers(expression),
            self.sql(expression, "order"),
            self.sql(expression, "offset") if fetch else self.sql(limit),
            self.sql(limit) if fetch else self.sql(expression, "offset"),
            *self.after_limit_modifiers(expression),
            sep="",
        )

    def after_having_modifiers(self, expression: exp.Expression) -> t.List[str]:
        return [
            self.sql(expression, "qualify"),
            self.seg("WINDOW ") + self.expressions(expression, key="windows", flat=True)
            if expression.args.get("windows")
            else "",
        ]

    def after_limit_modifiers(self, expression: exp.Expression) -> t.List[str]:
        locks = self.expressions(expression, key="locks", sep=" ")
        locks = f" {locks}" if locks else ""
        return [locks, self.sql(expression, "sample")]

    def select_sql(self, expression: exp.Select) -> str:
        hint = self.sql(expression, "hint")
        distinct = self.sql(expression, "distinct")
        distinct = f" {distinct}" if distinct else ""
        kind = expression.args.get("kind")
        kind = f" AS {kind}" if kind else ""
        expressions = self.expressions(expression)
        expressions = f"{self.sep()}{expressions}" if expressions else expressions
        sql = self.query_modifiers(
            expression,
            f"SELECT{hint}{distinct}{kind}{expressions}",
            self.sql(expression, "into", comment=False),
            self.sql(expression, "from", comment=False),
        )
        return self.prepend_ctes(expression, sql)

    def schema_sql(self, expression: exp.Schema) -> str:
        this = self.sql(expression, "this")
        this = f"{this} " if this else ""
        sql = f"({self.sep('')}{self.expressions(expression)}{self.seg(')', sep='')}"
        return f"{this}{sql}"

    def star_sql(self, expression: exp.Star) -> str:
        except_ = self.expressions(expression, key="except", flat=True)
        except_ = f"{self.seg(self.STAR_MAPPING['except'])} ({except_})" if except_ else ""
        replace = self.expressions(expression, key="replace", flat=True)
        replace = f"{self.seg(self.STAR_MAPPING['replace'])} ({replace})" if replace else ""
        return f"*{except_}{replace}"

    def parameter_sql(self, expression: exp.Parameter) -> str:
        this = self.sql(expression, "this")
        this = f"{{{this}}}" if expression.args.get("wrapped") else f"{this}"
        return f"{self.PARAMETER_TOKEN}{this}"

    def sessionparameter_sql(self, expression: exp.SessionParameter) -> str:
        this = self.sql(expression, "this")
        kind = expression.text("kind")
        if kind:
            kind = f"{kind}."
        return f"@@{kind}{this}"

    def placeholder_sql(self, expression: exp.Placeholder) -> str:
        return f":{expression.name}" if expression.name else "?"

    def subquery_sql(self, expression: exp.Subquery, sep: str = " AS ") -> str:
        alias = self.sql(expression, "alias")
        alias = f"{sep}{alias}" if alias else ""

        pivots = self.expressions(expression, key="pivots", sep=" ", flat=True)
        pivots = f" {pivots}" if pivots else ""

        sql = self.query_modifiers(expression, self.wrap(expression), alias, pivots)
        return self.prepend_ctes(expression, sql)

    def qualify_sql(self, expression: exp.Qualify) -> str:
        this = self.indent(self.sql(expression, "this"))
        return f"{self.seg('QUALIFY')}{self.sep()}{this}"

    def union_sql(self, expression: exp.Union) -> str:
        return self.prepend_ctes(
            expression,
            self.set_operation(expression, self.union_op(expression)),
        )

    def union_op(self, expression: exp.Union) -> str:
        kind = " DISTINCT" if self.EXPLICIT_UNION else ""
        kind = kind if expression.args.get("distinct") else " ALL"
        return f"UNION{kind}"
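
    # UNNEST aliasing is dialect dependent: with unnest_column_only the alias names a
    # single column (BigQuery style), otherwise it names the derived table; WITH
    # ORDINALITY and WITH OFFSET are appended when present on the expression.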
    def unnest_sql(self, expression: exp.Unnest) -> str:
        args = self.expressions(expression, flat=True)
        alias = expression.args.get("alias")
        if alias and self.unnest_column_only:
            columns = alias.columns
            alias = self.sql(columns[0]) if columns else ""
        else:
            alias = self.sql(expression, "alias")
        alias = f" AS {alias}" if alias else alias
        ordinality = " WITH ORDINALITY" if expression.args.get("ordinality") else ""
        offset = expression.args.get("offset")
        offset = f" WITH OFFSET AS {self.sql(offset)}" if offset else ""
        return f"UNNEST({args}){ordinality}{alias}{offset}"

    def where_sql(self, expression: exp.Where) -> str:
        this = self.indent(self.sql(expression, "this"))
        return f"{self.seg('WHERE')}{self.sep()}{this}"
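
    # Window expressions render either as "<func> OVER (<spec>)" or, when attached to
    # a WINDOW clause (arg_key == "windows"), as "<name> AS (<spec>)"; the spec is the
    # concatenation of the alias, FIRST/LAST, PARTITION BY, ORDER BY and the frame.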
    def window_sql(self, expression: exp.Window) -> str:
        this = self.sql(expression, "this")

        partition = self.partition_by_sql(expression)

        order = expression.args.get("order")
        order_sql = self.order_sql(order, flat=True) if order else ""

        partition_sql = partition + " " if partition and order else partition

        spec = expression.args.get("spec")
        spec_sql = " " + self.windowspec_sql(spec) if spec else ""

        alias = self.sql(expression, "alias")
        over = self.sql(expression, "over") or "OVER"
        this = f"{this} {'AS' if expression.arg_key == 'windows' else over}"

        first = expression.args.get("first")
        if first is not None:
            first = " FIRST " if first else " LAST "
        first = first or ""

        if not partition and not order and not spec and alias:
            return f"{this} {alias}"

        window_args = alias + first + partition_sql + order_sql + spec_sql

        return f"{this} ({window_args.strip()})"

    def partition_by_sql(self, expression: exp.Window | exp.MatchRecognize) -> str:
        partition = self.expressions(expression, key="partition_by", flat=True)
        return f"PARTITION BY {partition}" if partition else ""

    def windowspec_sql(self, expression: exp.WindowSpec) -> str:
        kind = self.sql(expression, "kind")
        start = csv(self.sql(expression, "start"), self.sql(expression, "start_side"), sep=" ")
        end = (
            csv(self.sql(expression, "end"), self.sql(expression, "end_side"), sep=" ")
            or "CURRENT ROW"
        )
        return f"{kind} BETWEEN {start} AND {end}"

    def withingroup_sql(self, expression: exp.WithinGroup) -> str:
        this = self.sql(expression, "this")
        expression_sql = self.sql(expression, "expression")[1:]  # order has a leading space
        return f"{this} WITHIN GROUP ({expression_sql})"

    def between_sql(self, expression: exp.Between) -> str:
        this = self.sql(expression, "this")
        low = self.sql(expression, "low")
        high = self.sql(expression, "high")
        return f"{this} BETWEEN {low} AND {high}"

    def bracket_sql(self, expression: exp.Bracket) -> str:
        expressions = apply_index_offset(expression.this, expression.expressions, self.index_offset)
        expressions_sql = ", ".join(self.sql(e) for e in expressions)

        return f"{self.sql(expression, 'this')}[{expressions_sql}]"

    def all_sql(self, expression: exp.All) -> str:
        return f"ALL {self.wrap(expression)}"

    def any_sql(self, expression: exp.Any) -> str:
        this = self.sql(expression, "this")
        if isinstance(expression.this, exp.Subqueryable):
            this = self.wrap(this)
        return f"ANY {this}"

    def exists_sql(self, expression: exp.Exists) -> str:
        return f"EXISTS{self.wrap(expression)}"

    def case_sql(self, expression: exp.Case) -> str:
        this = self.sql(expression, "this")
        statements = [f"CASE {this}" if this else "CASE"]

        for e in expression.args["ifs"]:
            statements.append(f"WHEN {self.sql(e, 'this')}")
            statements.append(f"THEN {self.sql(e, 'true')}")

        default = self.sql(expression, "default")

        if default:
            statements.append(f"ELSE {default}")

        statements.append("END")

        if self.pretty and self.text_width(statements) > self._max_text_width:
            return self.indent("\n".join(statements), skip_first=True, skip_last=True)

        return " ".join(statements)

    def constraint_sql(self, expression: exp.Constraint) -> str:
        this = self.sql(expression, "this")
        expressions = self.expressions(expression, flat=True)
        return f"CONSTRAINT {this} {expressions}"

    def nextvaluefor_sql(self, expression: exp.NextValueFor) -> str:
        order = expression.args.get("order")
        order = f" OVER ({self.order_sql(order, flat=True)})" if order else ""
        return f"NEXT VALUE FOR {self.sql(expression, 'this')}{order}"

    def extract_sql(self, expression: exp.Extract) -> str:
        this = self.sql(expression, "this")
        expression_sql = self.sql(expression, "expression")
        return f"EXTRACT({this} FROM {expression_sql})"

    def trim_sql(self, expression: exp.Trim) -> str:
        trim_type = self.sql(expression, "position")

        if trim_type == "LEADING":
            return self.func("LTRIM", expression.this)
        elif trim_type == "TRAILING":
            return self.func("RTRIM", expression.this)
        else:
            return self.func("TRIM", expression.this, expression.expression)

    def concat_sql(self, expression: exp.Concat) -> str:
        if len(expression.expressions) == 1:
            return self.sql(expression.expressions[0])
        return self.function_fallback_sql(expression)

    def check_sql(self, expression: exp.Check) -> str:
        this = self.sql(expression, key="this")
        return f"CHECK ({this})"

    def foreignkey_sql(self, expression: exp.ForeignKey) -> str:
        expressions = self.expressions(expression, flat=True)
        reference = self.sql(expression, "reference")
        reference = f" {reference}" if reference else ""
        delete = self.sql(expression, "delete")
        delete = f" ON DELETE {delete}" if delete else ""
        update = self.sql(expression, "update")
        update = f" ON UPDATE {update}" if update else ""
        return f"FOREIGN KEY ({expressions}){reference}{delete}{update}"

    def primarykey_sql(self, expression: exp.ForeignKey) -> str:
        expressions = self.expressions(expression, flat=True)
        options = self.expressions(expression, key="options", flat=True, sep=" ")
        options = f" {options}" if options else ""
        return f"PRIMARY KEY ({expressions}){options}"

    def if_sql(self, expression: exp.If) -> str:
        return self.case_sql(
            exp.Case(ifs=[expression.copy()], default=expression.args.get("false"))
        )

    def matchagainst_sql(self, expression: exp.MatchAgainst) -> str:
        modifier = expression.args.get("modifier")
        modifier = f" {modifier}" if modifier else ""
        return f"{self.func('MATCH', *expression.expressions)} AGAINST({self.sql(expression, 'this')}{modifier})"

    def jsonkeyvalue_sql(self, expression: exp.JSONKeyValue) -> str:
        return f"{self.sql(expression, 'this')}: {self.sql(expression, 'expression')}"

    def jsonobject_sql(self, expression: exp.JSONObject) -> str:
        expressions = self.expressions(expression)
        null_handling = expression.args.get("null_handling")
        null_handling = f" {null_handling}" if null_handling else ""
        unique_keys = expression.args.get("unique_keys")
        if unique_keys is not None:
            unique_keys = f" {'WITH' if unique_keys else 'WITHOUT'} UNIQUE KEYS"
        else:
            unique_keys = ""
        return_type = self.sql(expression, "return_type")
        return_type = f" RETURNING {return_type}" if return_type else ""
        format_json = " FORMAT JSON" if expression.args.get("format_json") else ""
        encoding = self.sql(expression, "encoding")
        encoding = f" ENCODING {encoding}" if encoding else ""
        return f"JSON_OBJECT({expressions}{null_handling}{unique_keys}{return_type}{format_json}{encoding})"

    def openjsoncolumndef_sql(self, expression: exp.OpenJSONColumnDef) -> str:
        this = self.sql(expression, "this")
        kind = self.sql(expression, "kind")
        path = self.sql(expression, "path")
        path = f" {path}" if path else ""
        as_json = " AS JSON" if expression.args.get("as_json") else ""
        return f"{this} {kind}{path}{as_json}"

    def openjson_sql(self, expression: exp.OpenJSON) -> str:
        this = self.sql(expression, "this")
        path = self.sql(expression, "path")
        path = f", {path}" if path else ""
        expressions = self.expressions(expression)
        with_ = (
            f" WITH ({self.seg(self.indent(expressions), sep='')}{self.seg(')', sep='')}"
            if expressions
            else ""
        )
        return f"OPENJSON({this}{path}){with_}"

    def in_sql(self, expression: exp.In) -> str:
        query = expression.args.get("query")
        unnest = expression.args.get("unnest")
        field = expression.args.get("field")
        is_global = " GLOBAL" if expression.args.get("is_global") else ""

        if query:
            in_sql = self.wrap(query)
        elif unnest:
            in_sql = self.in_unnest_op(unnest)
        elif field:
            in_sql = self.sql(field)
        else:
            in_sql = f"({self.expressions(expression, flat=True)})"

        return f"{self.sql(expression, 'this')}{is_global} IN {in_sql}"

    def in_unnest_op(self, unnest: exp.Unnest) -> str:
        return f"(SELECT {self.sql(unnest)})"
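
    # INTERVAL literals come in two shapes: dialects with SINGLE_STRING_INTERVAL fold
    # the unit into one quoted string (INTERVAL '1 day'), while the rest keep value and
    # unit separate (INTERVAL 1 day, or INTERVAL (expr) day for non-literal values).
    # Plural unit names are normalized via TIME_PART_SINGULARS when the dialect does
    # not allow them.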
    def interval_sql(self, expression: exp.Interval) -> str:
        unit = self.sql(expression, "unit")
        if not self.INTERVAL_ALLOWS_PLURAL_FORM:
            unit = self.TIME_PART_SINGULARS.get(unit.lower(), unit)
        unit = f" {unit}" if unit else ""

        if self.SINGLE_STRING_INTERVAL:
            this = expression.this.name if expression.this else ""
            return f"INTERVAL '{this}{unit}'" if this else f"INTERVAL{unit}"

        this = self.sql(expression, "this")
        if this:
            unwrapped = isinstance(expression.this, self.UNWRAPPED_INTERVAL_VALUES)
            this = f" {this}" if unwrapped else f" ({this})"

        return f"INTERVAL{this}{unit}"

    def return_sql(self, expression: exp.Return) -> str:
        return f"RETURN {self.sql(expression, 'this')}"

    def reference_sql(self, expression: exp.Reference) -> str:
        this = self.sql(expression, "this")
        expressions = self.expressions(expression, flat=True)
        expressions = f"({expressions})" if expressions else ""
        options = self.expressions(expression, key="options", flat=True, sep=" ")
        options = f" {options}" if options else ""
        return f"REFERENCES {this}{expressions}{options}"

    def anonymous_sql(self, expression: exp.Anonymous) -> str:
        return self.func(expression.name, *expression.expressions)

    def paren_sql(self, expression: exp.Paren) -> str:
        if isinstance(expression.unnest(), exp.Select):
            sql = self.wrap(expression)
        else:
            sql = self.seg(self.indent(self.sql(expression, "this")), sep="")
            sql = f"({sql}{self.seg(')', sep='')}"

        return self.prepend_ctes(expression, sql)

    def neg_sql(self, expression: exp.Neg) -> str:
        # This makes sure we don't convert "- - 5" to "--5", which is a comment
        this_sql = self.sql(expression, "this")
        sep = " " if this_sql[0] == "-" else ""
        return f"-{sep}{this_sql}"

    def not_sql(self, expression: exp.Not) -> str:
        return f"NOT {self.sql(expression, 'this')}"

    def alias_sql(self, expression: exp.Alias) -> str:
        alias = self.sql(expression, "alias")
        alias = f" AS {alias}" if alias else ""
        return f"{self.sql(expression, 'this')}{alias}"

    def aliases_sql(self, expression: exp.Aliases) -> str:
        return f"{self.sql(expression, 'this')} AS ({self.expressions(expression, flat=True)})"

    def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
        this = self.sql(expression, "this")
        zone = self.sql(expression, "zone")
        return f"{this} AT TIME ZONE {zone}"

    def add_sql(self, expression: exp.Add) -> str:
        return self.binary(expression, "+")

    def and_sql(self, expression: exp.And) -> str:
        return self.connector_sql(expression, "AND")
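
    # AND/OR chains are flattened and, in pretty mode, broken onto new lines once the
    # combined text exceeds the configured maximum width; otherwise they fall back to
    # the plain single-line binary form.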
    def connector_sql(self, expression: exp.Connector, op: str) -> str:
        if not self.pretty:
            return self.binary(expression, op)

        sqls = tuple(
            self.maybe_comment(self.sql(e), e, e.parent.comments or []) if i != 1 else self.sql(e)
            for i, e in enumerate(expression.flatten(unnest=False))
        )

        sep = "\n" if self.text_width(sqls) > self._max_text_width else " "
        return f"{sep}{op} ".join(sqls)

    def bitwiseand_sql(self, expression: exp.BitwiseAnd) -> str:
        return self.binary(expression, "&")

    def bitwiseleftshift_sql(self, expression: exp.BitwiseLeftShift) -> str:
        return self.binary(expression, "<<")

    def bitwisenot_sql(self, expression: exp.BitwiseNot) -> str:
        return f"~{self.sql(expression, 'this')}"

    def bitwiseor_sql(self, expression: exp.BitwiseOr) -> str:
        return self.binary(expression, "|")

    def bitwiserightshift_sql(self, expression: exp.BitwiseRightShift) -> str:
        return self.binary(expression, ">>")

    def bitwisexor_sql(self, expression: exp.BitwiseXor) -> str:
        return self.binary(expression, "^")

    def cast_sql(self, expression: exp.Cast) -> str:
        return f"CAST({self.sql(expression, 'this')} AS {self.sql(expression, 'to')})"

    def currentdate_sql(self, expression: exp.CurrentDate) -> str:
        zone = self.sql(expression, "this")
        return f"CURRENT_DATE({zone})" if zone else "CURRENT_DATE"

    def collate_sql(self, expression: exp.Collate) -> str:
        return self.binary(expression, "COLLATE")

    def command_sql(self, expression: exp.Command) -> str:
        return f"{self.sql(expression, 'this').upper()} {expression.text('expression').strip()}"

    def comment_sql(self, expression: exp.Comment) -> str:
        this = self.sql(expression, "this")
        kind = expression.args["kind"]
        exists_sql = " IF EXISTS " if expression.args.get("exists") else " "
        expression_sql = self.sql(expression, "expression")
        return f"COMMENT{exists_sql}ON {kind} {this} IS {expression_sql}"

    def mergetreettlaction_sql(self, expression: exp.MergeTreeTTLAction) -> str:
        this = self.sql(expression, "this")
        delete = " DELETE" if expression.args.get("delete") else ""
        recompress = self.sql(expression, "recompress")
        recompress = f" RECOMPRESS {recompress}" if recompress else ""
        to_disk = self.sql(expression, "to_disk")
        to_disk = f" TO DISK {to_disk}" if to_disk else ""
        to_volume = self.sql(expression, "to_volume")
        to_volume = f" TO VOLUME {to_volume}" if to_volume else ""
        return f"{this}{delete}{recompress}{to_disk}{to_volume}"

    def mergetreettl_sql(self, expression: exp.MergeTreeTTL) -> str:
        where = self.sql(expression, "where")
        group = self.sql(expression, "group")
        aggregates = self.expressions(expression, key="aggregates")
        aggregates = self.seg("SET") + self.seg(aggregates) if aggregates else ""

        if not (where or group or aggregates) and len(expression.expressions) == 1:
            return f"TTL {self.expressions(expression, flat=True)}"

        return f"TTL{self.seg(self.expressions(expression))}{where}{group}{aggregates}"

    def transaction_sql(self, expression: exp.Transaction) -> str:
        return "BEGIN"

    def commit_sql(self, expression: exp.Commit) -> str:
        chain = expression.args.get("chain")
        if chain is not None:
            chain = " AND CHAIN" if chain else " AND NO CHAIN"

        return f"COMMIT{chain or ''}"

    def rollback_sql(self, expression: exp.Rollback) -> str:
        savepoint = expression.args.get("savepoint")
        savepoint = f" TO {savepoint}" if savepoint else ""
        return f"ROLLBACK{savepoint}"
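
    # ALTER COLUMN supports three shapes here: a type change (with optional COLLATE and
    # USING), SET DEFAULT, and DROP DEFAULT; anything else is reported through
    # self.unsupported() before falling back to DROP DEFAULT.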
def altercolumn_sql(self, expression: exp.AlterColumn) -> str:
|
|
|
|
this = self.sql(expression, "this")
|
|
|
|
|
|
|
|
dtype = self.sql(expression, "dtype")
|
|
|
|
if dtype:
|
|
|
|
collate = self.sql(expression, "collate")
|
|
|
|
collate = f" COLLATE {collate}" if collate else ""
|
|
|
|
using = self.sql(expression, "using")
|
|
|
|
using = f" USING {using}" if using else ""
|
|
|
|
return f"ALTER COLUMN {this} TYPE {dtype}{collate}{using}"
|
|
|
|
|
|
|
|
default = self.sql(expression, "default")
|
|
|
|
if default:
|
|
|
|
return f"ALTER COLUMN {this} SET DEFAULT {default}"
|
|
|
|
|
|
|
|
if not expression.args.get("drop"):
|
|
|
|
self.unsupported("Unsupported ALTER COLUMN syntax")
|
|
|
|
|
|
|
|
return f"ALTER COLUMN {this} DROP DEFAULT"
|
|
|
|
|
2025-02-13 15:05:06 +01:00
|
|
|
def renametable_sql(self, expression: exp.RenameTable) -> str:
|
2025-02-13 15:57:23 +01:00
|
|
|
if not self.RENAME_TABLE_WITH_DB:
|
|
|
|
# Remove db from tables
|
|
|
|
expression = expression.transform(
|
|
|
|
lambda n: exp.table_(n.this) if isinstance(n, exp.Table) else n
|
|
|
|
)
|
2025-02-13 15:05:06 +01:00
|
|
|
this = self.sql(expression, "this")
|
|
|
|
return f"RENAME TO {this}"
|
|
|
|
|
2025-02-13 15:01:55 +01:00
|
|
|
def altertable_sql(self, expression: exp.AlterTable) -> str:
|
|
|
|
actions = expression.args["actions"]
|
|
|
|
|
|
|
|
if isinstance(actions[0], exp.ColumnDef):
|
2025-02-13 15:52:09 +01:00
|
|
|
actions = self.expressions(expression, key="actions", prefix="ADD COLUMN ")
|
2025-02-13 15:01:55 +01:00
|
|
|
elif isinstance(actions[0], exp.Schema):
|
2025-02-13 15:52:09 +01:00
|
|
|
actions = self.expressions(expression, key="actions", prefix="ADD COLUMNS ")
|
2025-02-13 15:08:15 +01:00
|
|
|
elif isinstance(actions[0], exp.Delete):
|
2025-02-13 15:52:09 +01:00
|
|
|
actions = self.expressions(expression, key="actions", flat=True)
|
2025-02-13 15:01:55 +01:00
|
|
|
else:
|
2025-02-13 15:52:09 +01:00
|
|
|
actions = self.expressions(expression, key="actions")
|
2025-02-13 15:01:55 +01:00
|
|
|
|
|
|
|
exists = " IF EXISTS" if expression.args.get("exists") else ""
|
|
|
|
return f"ALTER TABLE{exists} {self.sql(expression, 'this')} {actions}"
|
|
|
|
|
2025-02-13 15:08:15 +01:00
|
|
|
def droppartition_sql(self, expression: exp.DropPartition) -> str:
|
|
|
|
expressions = self.expressions(expression)
|
|
|
|
exists = " IF EXISTS " if expression.args.get("exists") else " "
|
|
|
|
return f"DROP{exists}{expressions}"
|
|
|
|
|
2025-02-13 15:07:05 +01:00
|
|
|
def addconstraint_sql(self, expression: exp.AddConstraint) -> str:
|
|
|
|
this = self.sql(expression, "this")
|
|
|
|
expression_ = self.sql(expression, "expression")
|
|
|
|
add_constraint = f"ADD CONSTRAINT {this}" if this else "ADD"
|
|
|
|
|
|
|
|
enforced = expression.args.get("enforced")
|
|
|
|
if enforced is not None:
|
|
|
|
return f"{add_constraint} CHECK ({expression_}){' ENFORCED' if enforced else ''}"
|
|
|
|
|
|
|
|
return f"{add_constraint} {expression_}"
|
|
|
|
|
2025-02-13 14:58:37 +01:00
|
|
|
def distinct_sql(self, expression: exp.Distinct) -> str:
|
2025-02-13 14:46:58 +01:00
|
|
|
this = self.expressions(expression, flat=True)
|
2025-02-13 06:15:54 +01:00
|
|
|
this = f" {this}" if this else ""
|
|
|
|
|
|
|
|
on = self.sql(expression, "on")
|
|
|
|
on = f" ON {on}" if on else ""
|
|
|
|
return f"DISTINCT{this}{on}"
|
|
|
|
|
2025-02-13 14:58:37 +01:00
|
|
|

    def ignorenulls_sql(self, expression: exp.IgnoreNulls) -> str:
        return f"{self.sql(expression, 'this')} IGNORE NULLS"

    def respectnulls_sql(self, expression: exp.RespectNulls) -> str:
        return f"{self.sql(expression, 'this')} RESPECT NULLS"

    def intdiv_sql(self, expression: exp.IntDiv) -> str:
        return self.sql(
            exp.Cast(
                this=exp.Div(this=expression.this, expression=expression.expression),
                to=exp.DataType(this=exp.DataType.Type.INT),
            )
        )
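
    # Integer division is rewritten as a cast of a regular division, so an IntDiv
    # over two operands renders as e.g. "CAST(a / b AS INT)" (operand names here
    # are illustrative only).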

    def dpipe_sql(self, expression: exp.DPipe) -> str:
        return self.binary(expression, "||")

    def div_sql(self, expression: exp.Div) -> str:
        return self.binary(expression, "/")

    def overlaps_sql(self, expression: exp.Overlaps) -> str:
        return self.binary(expression, "OVERLAPS")

    def distance_sql(self, expression: exp.Distance) -> str:
        return self.binary(expression, "<->")

    def dot_sql(self, expression: exp.Dot) -> str:
        return f"{self.sql(expression, 'this')}.{self.sql(expression, 'expression')}"

    def eq_sql(self, expression: exp.EQ) -> str:
        return self.binary(expression, "=")

    def escape_sql(self, expression: exp.Escape) -> str:
        return self.binary(expression, "ESCAPE")

    def glob_sql(self, expression: exp.Glob) -> str:
        return self.binary(expression, "GLOB")

    def gt_sql(self, expression: exp.GT) -> str:
        return self.binary(expression, ">")

    def gte_sql(self, expression: exp.GTE) -> str:
        return self.binary(expression, ">=")

    def ilike_sql(self, expression: exp.ILike) -> str:
        return self.binary(expression, "ILIKE")

    def ilikeany_sql(self, expression: exp.ILikeAny) -> str:
        return self.binary(expression, "ILIKE ANY")

    def is_sql(self, expression: exp.Is) -> str:
        return self.binary(expression, "IS")

    def like_sql(self, expression: exp.Like) -> str:
        return self.binary(expression, "LIKE")

    def likeany_sql(self, expression: exp.LikeAny) -> str:
        return self.binary(expression, "LIKE ANY")

    def similarto_sql(self, expression: exp.SimilarTo) -> str:
        return self.binary(expression, "SIMILAR TO")

    def lt_sql(self, expression: exp.LT) -> str:
        return self.binary(expression, "<")

    def lte_sql(self, expression: exp.LTE) -> str:
        return self.binary(expression, "<=")

    def mod_sql(self, expression: exp.Mod) -> str:
        return self.binary(expression, "%")

    def mul_sql(self, expression: exp.Mul) -> str:
        return self.binary(expression, "*")

    def neq_sql(self, expression: exp.NEQ) -> str:
        return self.binary(expression, "<>")

    def nullsafeeq_sql(self, expression: exp.NullSafeEQ) -> str:
        return self.binary(expression, "IS NOT DISTINCT FROM")

    def nullsafeneq_sql(self, expression: exp.NullSafeNEQ) -> str:
        return self.binary(expression, "IS DISTINCT FROM")

    def or_sql(self, expression: exp.Or) -> str:
        return self.connector_sql(expression, "OR")

    def slice_sql(self, expression: exp.Slice) -> str:
        return self.binary(expression, ":")

    def sub_sql(self, expression: exp.Sub) -> str:
        return self.binary(expression, "-")

    def trycast_sql(self, expression: exp.TryCast) -> str:
        return f"TRY_CAST({self.sql(expression, 'this')} AS {self.sql(expression, 'to')})"

    def use_sql(self, expression: exp.Use) -> str:
        kind = self.sql(expression, "kind")
        kind = f" {kind}" if kind else ""
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"USE{kind}{this}"

    def binary(self, expression: exp.Binary, op: str) -> str:
        op = self.maybe_comment(op, comments=expression.comments)
        return f"{self.sql(expression, 'this')} {op} {self.sql(expression, 'expression')}"
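
    # All of the two-operand helpers above funnel through binary(); e.g. an EQ node
    # over a column and a literal renders roughly as "a = 1" (names illustrative),
    # with any comments attached to the node re-emitted next to the operator.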

    def function_fallback_sql(self, expression: exp.Func) -> str:
        args = []
        for arg_value in expression.args.values():
            if isinstance(arg_value, list):
                for value in arg_value:
                    args.append(value)
            else:
                args.append(arg_value)

        return self.func(expression.sql_name(), *args)

    def func(self, name: str, *args: t.Optional[exp.Expression | str]) -> str:
        return f"{self.normalize_func(name)}({self.format_args(*args)})"

    def format_args(self, *args: t.Optional[str | exp.Expression]) -> str:
        arg_sqls = tuple(self.sql(arg) for arg in args if arg is not None)
        if self.pretty and self.text_width(arg_sqls) > self._max_text_width:
            return self.indent("\n" + ",\n".join(arg_sqls) + "\n", skip_first=True, skip_last=True)
        return ", ".join(arg_sqls)
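
    # Sketch (hypothetical call): func("coalesce", <column a>, <literal 1>) renders
    # as "COALESCE(a, 1)" under the default function-name normalization; in pretty
    # mode, argument lists wider than the configured max text width are split onto
    # one line per argument by format_args.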

    def text_width(self, args: t.Iterable) -> int:
        return sum(len(arg) for arg in args)

    def format_time(self, expression: exp.Expression) -> t.Optional[str]:
        return format_time(self.sql(expression, "format"), self.time_mapping, self.time_trie)

    def expressions(
        self,
        expression: t.Optional[exp.Expression] = None,
        key: t.Optional[str] = None,
        sqls: t.Optional[t.List[str]] = None,
        flat: bool = False,
        indent: bool = True,
        sep: str = ", ",
        prefix: str = "",
    ) -> str:
        expressions = expression.args.get(key or "expressions") if expression else sqls

        if not expressions:
            return ""

        if flat:
            return sep.join(self.sql(e) for e in expressions)

        num_sqls = len(expressions)

        # These are calculated once, in case the leading_comma / pretty options are set
        pad = " " * self.pad
        stripped_sep = sep.strip()

        result_sqls = []
        for i, e in enumerate(expressions):
            sql = self.sql(e, comment=False)
            comments = self.maybe_comment("", e) if isinstance(e, exp.Expression) else ""

            if self.pretty:
                if self._leading_comma:
                    result_sqls.append(f"{sep if i > 0 else pad}{prefix}{sql}{comments}")
                else:
                    result_sqls.append(
                        f"{prefix}{sql}{stripped_sep if i + 1 < num_sqls else ''}{comments}"
                    )
            else:
                result_sqls.append(f"{prefix}{sql}{comments}{sep if i + 1 < num_sqls else ''}")

        result_sql = "\n".join(result_sqls) if self.pretty else "".join(result_sqls)
        return self.indent(result_sql, skip_first=False) if indent else result_sql
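
    # Behavior sketch: in non-pretty mode the child expressions are simply joined
    # with `sep` (e.g. "a, b, c"); in pretty mode each item goes on its own line,
    # with the comma leading or trailing depending on the leading_comma setting,
    # and the optional `prefix` (e.g. "ADD COLUMN ") is prepended to every item.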

    def op_expressions(self, op: str, expression: exp.Expression, flat: bool = False) -> str:
        flat = flat or isinstance(expression.parent, exp.Properties)
        expressions_sql = self.expressions(expression, flat=flat)
        if flat:
            return f"{op} {expressions_sql}"
        return f"{self.seg(op)}{self.sep() if expressions_sql else ''}{expressions_sql}"

    def naked_property(self, expression: exp.Property) -> str:
        property_name = exp.Properties.PROPERTY_TO_NAME.get(expression.__class__)
        if not property_name:
            self.unsupported(f"Unsupported property {expression.__class__.__name__}")
        return f"{property_name} {self.sql(expression, 'this')}"

    def set_operation(self, expression: exp.Expression, op: str) -> str:
        this = self.sql(expression, "this")
        op = self.seg(op)
        return self.query_modifiers(
            expression, f"{this}{op}{self.sep()}{self.sql(expression, 'expression')}"
        )

    def tag_sql(self, expression: exp.Tag) -> str:
        return f"{expression.args.get('prefix')}{self.sql(expression.this)}{expression.args.get('postfix')}"

    def token_sql(self, token_type: TokenType) -> str:
        return self.TOKEN_MAPPING.get(token_type, token_type.name)

    def userdefinedfunction_sql(self, expression: exp.UserDefinedFunction) -> str:
        this = self.sql(expression, "this")
        expressions = self.no_identify(self.expressions, expression)
        expressions = (
            self.wrap(expressions) if expression.args.get("wrapped") else f" {expressions}"
        )
        return f"{this}{expressions}"

    def joinhint_sql(self, expression: exp.JoinHint) -> str:
        this = self.sql(expression, "this")
        expressions = self.expressions(expression, flat=True)
        return f"{this}({expressions})"

    def kwarg_sql(self, expression: exp.Kwarg) -> str:
        return self.binary(expression, "=>")

    def when_sql(self, expression: exp.When) -> str:
        matched = "MATCHED" if expression.args["matched"] else "NOT MATCHED"
        source = " BY SOURCE" if self.MATCHED_BY_SOURCE and expression.args.get("source") else ""
        condition = self.sql(expression, "condition")
        condition = f" AND {condition}" if condition else ""

        then_expression = expression.args.get("then")
        if isinstance(then_expression, exp.Insert):
            then = f"INSERT {self.sql(then_expression, 'this')}"
            if "expression" in then_expression.args:
                then += f" VALUES {self.sql(then_expression, 'expression')}"
        elif isinstance(then_expression, exp.Update):
            if isinstance(then_expression.args.get("expressions"), exp.Star):
                then = f"UPDATE {self.sql(then_expression, 'expressions')}"
            else:
                then = f"UPDATE SET {self.expressions(then_expression, flat=True)}"
        else:
            then = self.sql(then_expression)
        return f"WHEN {matched}{source}{condition} THEN {then}"

    def merge_sql(self, expression: exp.Merge) -> str:
        this = self.sql(expression, "this")
        using = f"USING {self.sql(expression, 'using')}"
        on = f"ON {self.sql(expression, 'on')}"
        return f"MERGE INTO {this} {using} {on} {self.expressions(expression, sep=' ')}"
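
    # Roughly illustrative result (hypothetical tables and columns), combining
    # merge_sql with one when_sql clause per action, all joined with spaces:
    #   "MERGE INTO t USING s ON t.id = s.id WHEN MATCHED THEN UPDATE SET v = s.v
    #    WHEN NOT MATCHED THEN INSERT (id, v) VALUES (s.id, s.v)"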

    def tochar_sql(self, expression: exp.ToChar) -> str:
        if expression.args.get("format"):
            self.unsupported("Format argument unsupported for TO_CHAR/TO_VARCHAR function")

        return self.sql(exp.cast(expression.this, "text"))


def cached_generator(
    cache: t.Optional[t.Dict[int, str]] = None
) -> t.Callable[[exp.Expression], str]:
    """Returns a cached generator."""
    cache = {} if cache is None else cache
    generator = Generator(normalize=True, identify="safe")
    return lambda e: generator.generate(e, cache)
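
# Usage sketch (assuming an already-built expression tree `e`): the returned
# callable shares one int-keyed cache dict across calls, so repeatedly generating
# SQL for the same expression objects can reuse previously produced strings.
#
#   generate = cached_generator()
#   sql = generate(e)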