from __future__ import annotations

import typing as t

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    date_trunc_to_time,
    datestrtodate_sql,
    format_time_lambda,
    inline_array_sql,
    max_or_greatest,
    min_or_least,
    rename_func,
    timestamptrunc_sql,
    timestrtotime_sql,
    ts_or_ds_to_date_sql,
    var_map_sql,
)
from sqlglot.expressions import Literal
from sqlglot.helper import flatten, seq_get
from sqlglot.parser import binary_range_parser
from sqlglot.tokens import TokenType


def _check_int(s: str) -> bool:
    if s[0] in ("-", "+"):
        return s[1:].isdigit()
    return s.isdigit()
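
# e.g. _check_int("25") and _check_int("-25") are True while _check_int("2.5")
# is False; _snowflake_to_timestamp below uses this to tell integer-looking
# string literals apart from date strings.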


# from https://docs.snowflake.com/en/sql-reference/functions/to_timestamp.html
def _snowflake_to_timestamp(args: t.Sequence) -> t.Union[exp.StrToTime, exp.UnixToTime]:
    if len(args) == 2:
        first_arg, second_arg = args
        if second_arg.is_string:
            # case: <string_expr> [ , <format> ]
            return format_time_lambda(exp.StrToTime, "snowflake")(args)

        # case: <numeric_expr> [ , <scale> ]
        if second_arg.name not in ["0", "3", "9"]:
            raise ValueError(
                f"Scale for snowflake numeric timestamp is {second_arg}, but should be 0, 3, or 9"
            )

        if second_arg.name == "0":
            timescale = exp.UnixToTime.SECONDS
        elif second_arg.name == "3":
            timescale = exp.UnixToTime.MILLIS
        elif second_arg.name == "9":
            timescale = exp.UnixToTime.MICROS

        return exp.UnixToTime(this=first_arg, scale=timescale)

    first_arg = seq_get(args, 0)
    if not isinstance(first_arg, Literal):
        # case: <variant_expr>
        return format_time_lambda(exp.StrToTime, "snowflake", default=True)(args)

    if first_arg.is_string:
        if _check_int(first_arg.this):
            # case: <integer>
            return exp.UnixToTime.from_arg_list(args)

        # case: <date_expr>
        return format_time_lambda(exp.StrToTime, "snowflake", default=True)(args)

    # case: <numeric_expr>
    return exp.UnixToTime.from_arg_list(args)
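
# Rough dispatch examples for the cases above:
#   TO_TIMESTAMP('2020-01-01', 'yyyy-mm-dd') -> exp.StrToTime with a format
#   TO_TIMESTAMP(1614866400, 3)              -> exp.UnixToTime at millisecond scale
#   TO_TIMESTAMP('1614866400')               -> exp.UnixToTime (integer-looking string)
#   TO_TIMESTAMP(col)                        -> exp.StrToTime with the default format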


def _unix_to_time_sql(self: generator.Generator, expression: exp.UnixToTime) -> str:
    scale = expression.args.get("scale")
    timestamp = self.sql(expression, "this")
    if scale in [None, exp.UnixToTime.SECONDS]:
        return f"TO_TIMESTAMP({timestamp})"
    if scale == exp.UnixToTime.MILLIS:
        return f"TO_TIMESTAMP({timestamp}, 3)"
    if scale == exp.UnixToTime.MICROS:
        return f"TO_TIMESTAMP({timestamp}, 9)"

    raise ValueError("Improper scale for timestamp")
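
# The inverse of _snowflake_to_timestamp for numeric inputs: SECONDS, MILLIS and
# MICROS render as TO_TIMESTAMP(x), TO_TIMESTAMP(x, 3) and TO_TIMESTAMP(x, 9).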


# https://docs.snowflake.com/en/sql-reference/functions/date_part.html
# https://docs.snowflake.com/en/sql-reference/functions-date-time.html#label-supported-date-time-parts
def _parse_date_part(self: parser.Parser) -> t.Optional[exp.Expression]:
    this = self._parse_var() or self._parse_type()

    if not this:
        return None

    self._match(TokenType.COMMA)
    expression = self._parse_bitwise()

    name = this.name.upper()
    if name.startswith("EPOCH"):
        if name.startswith("EPOCH_MILLISECOND"):
            scale = 10**3
        elif name.startswith("EPOCH_MICROSECOND"):
            scale = 10**6
        elif name.startswith("EPOCH_NANOSECOND"):
            scale = 10**9
        else:
            scale = None

        ts = self.expression(exp.Cast, this=expression, to=exp.DataType.build("TIMESTAMP"))
        to_unix: exp.Expression = self.expression(exp.TimeToUnix, this=ts)

        if scale:
            to_unix = exp.Mul(this=to_unix, expression=exp.Literal.number(scale))

        return to_unix

    return self.expression(exp.Extract, this=this, expression=expression)
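
# e.g. DATE_PART(epoch_millisecond, col) parses (roughly) to
# EXTRACT(epoch FROM CAST(col AS TIMESTAMP)) * 1000, while a plain part such as
# DATE_PART(year, col) parses to EXTRACT(year FROM col).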


# https://docs.snowflake.com/en/sql-reference/functions/div0
def _div0_to_if(args: t.Sequence) -> exp.Expression:
    cond = exp.EQ(this=seq_get(args, 1), expression=exp.Literal.number(0))
    true = exp.Literal.number(0)
    false = exp.Div(this=seq_get(args, 0), expression=seq_get(args, 1))
    return exp.If(this=cond, true=true, false=false)
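
# e.g. DIV0(a, b) parses to IF(b = 0, 0, a / b); the generator below renders
# exp.If back as IFF, so a Snowflake round trip yields IFF(b = 0, 0, a / b).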


# https://docs.snowflake.com/en/sql-reference/functions/zeroifnull
def _zeroifnull_to_if(args: t.Sequence) -> exp.Expression:
    cond = exp.Is(this=seq_get(args, 0), expression=exp.Null())
    return exp.If(this=cond, true=exp.Literal.number(0), false=seq_get(args, 0))
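
# e.g. ZEROIFNULL(x) parses to IF(x IS NULL, 0, x).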


# https://docs.snowflake.com/en/sql-reference/functions/nullifzero
def _nullifzero_to_if(args: t.Sequence) -> exp.Expression:
    cond = exp.EQ(this=seq_get(args, 0), expression=exp.Literal.number(0))
    return exp.If(this=cond, true=exp.Null(), false=seq_get(args, 0))
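
# e.g. NULLIFZERO(x) parses to IF(x = 0, NULL, x).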


def _datatype_sql(self: generator.Generator, expression: exp.DataType) -> str:
    if expression.this == exp.DataType.Type.ARRAY:
        return "ARRAY"
    elif expression.this == exp.DataType.Type.MAP:
        return "OBJECT"
    return self.datatype_sql(expression)
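
# Snowflake's semi-structured types take no type parameters, so e.g.
# ARRAY<INT> is generated as plain ARRAY and MAP types as OBJECT.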


class Snowflake(Dialect):
    null_ordering = "nulls_are_large"
    time_format = "'yyyy-mm-dd hh24:mi:ss'"

    time_mapping = {
        "YYYY": "%Y",
        "yyyy": "%Y",
        "YY": "%y",
        "yy": "%y",
        "MMMM": "%B",
        "mmmm": "%B",
        "MON": "%b",
        "mon": "%b",
        "MM": "%m",
        "mm": "%m",
        "DD": "%d",
        "dd": "%-d",
        "DY": "%a",
        "dy": "%w",
        "HH24": "%H",
        "hh24": "%H",
        "HH12": "%I",
        "hh12": "%I",
        "MI": "%M",
        "mi": "%M",
        "SS": "%S",
        "ss": "%S",
        "FF": "%f",
        "ff": "%f",
        "FF6": "%f",
        "ff6": "%f",
    }
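
    # e.g. the dialect's default time_format 'yyyy-mm-dd hh24:mi:ss' maps,
    # piece by piece, to the python strftime format '%Y-%m-%-d %H:%M:%S'.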

    class Parser(parser.Parser):
        QUOTED_PIVOT_COLUMNS = True

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            "ARRAYAGG": exp.ArrayAgg.from_arg_list,
            "ARRAY_CONSTRUCT": exp.Array.from_arg_list,
            "ARRAY_TO_STRING": exp.ArrayJoin.from_arg_list,
            "CONVERT_TIMEZONE": lambda args: exp.AtTimeZone(
                this=seq_get(args, 1),
                zone=seq_get(args, 0),
            ),
            "DATE_TRUNC": date_trunc_to_time,
            "DATEADD": lambda args: exp.DateAdd(
                this=seq_get(args, 2),
                expression=seq_get(args, 1),
                unit=seq_get(args, 0),
            ),
            "DATEDIFF": lambda args: exp.DateDiff(
                this=seq_get(args, 2),
                expression=seq_get(args, 1),
                unit=seq_get(args, 0),
            ),
            "DIV0": _div0_to_if,
            "IFF": exp.If.from_arg_list,
            "NULLIFZERO": _nullifzero_to_if,
            "OBJECT_CONSTRUCT": parser.parse_var_map,
            "RLIKE": exp.RegexpLike.from_arg_list,
            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
            "TO_ARRAY": exp.Array.from_arg_list,
            "TO_VARCHAR": exp.ToChar.from_arg_list,
            "TO_TIMESTAMP": _snowflake_to_timestamp,
            "ZEROIFNULL": _zeroifnull_to_if,
        }
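
        # Snowflake puts the unit first, e.g. DATEADD(day, 5, col), while the
        # canonical exp.DateAdd/exp.DateDiff store the target expression in
        # `this`; hence the reversed seq_get indices above.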

        FUNCTION_PARSERS = {
            **parser.Parser.FUNCTION_PARSERS,
            "DATE_PART": _parse_date_part,
        }
        FUNCTION_PARSERS.pop("TRIM")

        FUNC_TOKENS = {
            *parser.Parser.FUNC_TOKENS,
            TokenType.RLIKE,
            TokenType.TABLE,
        }

        COLUMN_OPERATORS = {
            **parser.Parser.COLUMN_OPERATORS,  # type: ignore
            TokenType.COLON: lambda self, this, path: self.expression(
                exp.Bracket,
                this=this,
                expressions=[path],
            ),
        }

        RANGE_PARSERS = {
            **parser.Parser.RANGE_PARSERS,  # type: ignore
            TokenType.LIKE_ANY: binary_range_parser(exp.LikeAny),
            TokenType.ILIKE_ANY: binary_range_parser(exp.ILikeAny),
        }

        ALTER_PARSERS = {
            **parser.Parser.ALTER_PARSERS,  # type: ignore
            "UNSET": lambda self: self._parse_alter_table_set_tag(unset=True),
            "SET": lambda self: self._parse_alter_table_set_tag(),
        }

        def _parse_alter_table_set_tag(self, unset: bool = False) -> exp.Expression:
            self._match_text_seq("TAG")
            parser = t.cast(t.Callable, self._parse_id_var if unset else self._parse_conjunction)
            return self.expression(exp.SetTag, expressions=self._parse_csv(parser), unset=unset)
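
        # Covers e.g. ALTER TABLE t SET TAG a = 'x', b = 'y' and
        # ALTER TABLE t UNSET TAG a, b: UNSET parses bare identifiers while
        # SET parses full `tag = value` expressions.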

    class Tokenizer(tokens.Tokenizer):
        QUOTES = ["'", "$$"]
        STRING_ESCAPES = ["\\", "'"]
        HEX_STRINGS = [("x'", "'"), ("X'", "'")]

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "EXCLUDE": TokenType.EXCEPT,
            "ILIKE ANY": TokenType.ILIKE_ANY,
            "LIKE ANY": TokenType.LIKE_ANY,
            "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE,
            "PUT": TokenType.COMMAND,
            "RENAME": TokenType.REPLACE,
            "TIMESTAMP_LTZ": TokenType.TIMESTAMPLTZ,
            "TIMESTAMP_NTZ": TokenType.TIMESTAMP,
            "TIMESTAMP_TZ": TokenType.TIMESTAMPTZ,
            "TIMESTAMPNTZ": TokenType.TIMESTAMP,
            "MINUS": TokenType.EXCEPT,
            "SAMPLE": TokenType.TABLE_SAMPLE,
        }

        SINGLE_TOKENS = {
            **tokens.Tokenizer.SINGLE_TOKENS,
            "$": TokenType.PARAMETER,
        }

        VAR_SINGLE_TOKENS = {"$"}
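
        # Note (an assumption worth stating): "$" is both a single-character
        # parameter token (e.g. $1) and, via VAR_SINGLE_TOKENS, allowed inside
        # variable names, so identifiers containing "$" still tokenize as one var.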

    class Generator(generator.Generator):
        PARAMETER_TOKEN = "$"
        MATCHED_BY_SOURCE = False
        SINGLE_STRING_INTERVAL = True
        JOIN_HINTS = False
        TABLE_HINTS = False

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            exp.Array: inline_array_sql,
            exp.ArrayConcat: rename_func("ARRAY_CAT"),
            exp.ArrayJoin: rename_func("ARRAY_TO_STRING"),
            exp.AtTimeZone: lambda self, e: self.func(
                "CONVERT_TIMEZONE", e.args.get("zone"), e.this
            ),
            exp.DateAdd: lambda self, e: self.func("DATEADD", e.text("unit"), e.expression, e.this),
            exp.DateDiff: lambda self, e: self.func(
                "DATEDIFF", e.text("unit"), e.expression, e.this
            ),
            exp.DateStrToDate: datestrtodate_sql,
            exp.DataType: _datatype_sql,
            exp.DayOfWeek: rename_func("DAYOFWEEK"),
            exp.If: rename_func("IFF"),
            exp.LogicalAnd: rename_func("BOOLAND_AGG"),
            exp.LogicalOr: rename_func("BOOLOR_AGG"),
            exp.Map: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
            exp.Max: max_or_greatest,
            exp.Min: min_or_least,
            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
            exp.StarMap: rename_func("OBJECT_CONSTRUCT"),
            exp.StrPosition: lambda self, e: self.func(
                "POSITION", e.args.get("substr"), e.this, e.args.get("position")
            ),
            exp.StrToTime: lambda self, e: f"TO_TIMESTAMP({self.sql(e, 'this')}, {self.format_time(e)})",
            exp.TimeStrToTime: timestrtotime_sql,
            exp.TimeToUnix: lambda self, e: f"EXTRACT(epoch_second FROM {self.sql(e, 'this')})",
            exp.TimeToStr: lambda self, e: self.func(
                "TO_CHAR", exp.cast(e.this, "timestamp"), self.format_time(e)
            ),
            exp.TimestampTrunc: timestamptrunc_sql,
            exp.ToChar: lambda self, e: self.function_fallback_sql(e),
            exp.Trim: lambda self, e: self.func("TRIM", e.this, e.expression),
            exp.TsOrDsToDate: ts_or_ds_to_date_sql("snowflake"),
            exp.UnixToTime: _unix_to_time_sql,
            exp.VarMap: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
        }

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,  # type: ignore
            exp.DataType.Type.TIMESTAMP: "TIMESTAMPNTZ",
        }

        STAR_MAPPING = {
            "except": "EXCLUDE",
            "replace": "RENAME",
        }

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
            exp.SetProperty: exp.Properties.Location.UNSUPPORTED,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def except_op(self, expression: exp.Except) -> str:
            if not expression.args.get("distinct", False):
                self.unsupported("EXCEPT ALL is not supported in Snowflake")
            return super().except_op(expression)

        def intersect_op(self, expression: exp.Intersect) -> str:
            if not expression.args.get("distinct", False):
                self.unsupported("INTERSECT ALL is not supported in Snowflake")
            return super().intersect_op(expression)

        def values_sql(self, expression: exp.Values) -> str:
            """Due to a bug in Snowflake, all columns in a VALUES table alias must be unquoted.

            Once such columns are found, the `identify` argument is also suppressed when
            generating the SQL, so that the quotes cannot be re-added.
            """
            alias = expression.args.get("alias")
            if alias and alias.args.get("columns"):
                expression = expression.transform(
                    lambda node: exp.Identifier(**{**node.args, "quoted": False})
                    if isinstance(node, exp.Identifier)
                    and isinstance(node.parent, exp.TableAlias)
                    and node.arg_key == "columns"
                    else node,
                )
                return self.no_identify(lambda: super(self.__class__, self).values_sql(expression))
            return super().values_sql(expression)

        def settag_sql(self, expression: exp.SetTag) -> str:
            action = "UNSET" if expression.args.get("unset") else "SET"
            return f"{action} TAG {self.expressions(expression)}"

        def select_sql(self, expression: exp.Select) -> str:
            """Due to a bug in Snowflake, all columns in a VALUES table alias must be unquoted,
            along with any SELECT columns that reference them. Once such columns are found, the
            `identify` argument is also suppressed when generating the SQL, so that the quotes
            cannot be re-added.

            Note: we assume that any column referenced in a VALUES expression should be unquoted
            throughout the expression. This may not hold when the same column name can be sourced
            from another table that quotes properly, but it is true in most cases.
            """
            values_identifiers = set(
                flatten(
                    (v.args.get("alias") or exp.Alias()).args.get("columns", [])
                    for v in expression.find_all(exp.Values)
                )
            )
            if values_identifiers:
                expression = expression.transform(
                    lambda node: exp.Identifier(**{**node.args, "quoted": False})
                    if isinstance(node, exp.Identifier) and node in values_identifiers
                    else node,
                )
                return self.no_identify(lambda: super(self.__class__, self).select_sql(expression))
            return super().select_sql(expression)

        def describe_sql(self, expression: exp.Describe) -> str:
            # Default to table if kind is unknown
            kind_value = expression.args.get("kind") or "TABLE"
            kind = f" {kind_value}" if kind_value else ""
            this = f" {self.sql(expression, 'this')}"
            return f"DESCRIBE{kind}{this}"

        def generatedasidentitycolumnconstraint_sql(
            self, expression: exp.GeneratedAsIdentityColumnConstraint
        ) -> str:
            start = expression.args.get("start")
            start = f" START {start}" if start else ""
            increment = expression.args.get("increment")
            increment = f" INCREMENT {increment}" if increment else ""
            return f"AUTOINCREMENT{start}{increment}"