Adding upstream version 25.5.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
Parent: 147b6e06e8
Commit: 4e506fbac7

136 changed files with 80990 additions and 72541 deletions
sqlglot/dialects
@@ -21,6 +21,7 @@ from sqlglot.dialects.dialect import (
     timestamptrunc_sql,
     timestrtotime_sql,
     var_map_sql,
+    map_date_part,
 )
 from sqlglot.helper import flatten, is_float, is_int, seq_get
 from sqlglot.tokens import TokenType
@@ -75,7 +76,7 @@ def _build_object_construct(args: t.List) -> t.Union[exp.StarMap, exp.Struct]:

 def _build_datediff(args: t.List) -> exp.DateDiff:
     return exp.DateDiff(
-        this=seq_get(args, 2), expression=seq_get(args, 1), unit=_map_date_part(seq_get(args, 0))
+        this=seq_get(args, 2), expression=seq_get(args, 1), unit=map_date_part(seq_get(args, 0))
     )

@@ -84,7 +85,7 @@ def _build_date_time_add(expr_type: t.Type[E]) -> t.Callable[[t.List], E]:
         return expr_type(
            this=seq_get(args, 2),
            expression=seq_get(args, 1),
-            unit=_map_date_part(seq_get(args, 0)),
+            unit=map_date_part(seq_get(args, 0)),
         )

     return _builder
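Note: both builders rely on Snowflake's argument order, DATEDIFF(<unit>, <start>, <end>), swapping this/expression and normalizing the unit. A minimal sketch of the effect (the query and column names below are invented for illustration):

    import sqlglot
    from sqlglot import exp

    ast = sqlglot.parse_one("SELECT DATEDIFF(dd, start_col, end_col)", read="snowflake")
    diff = ast.find(exp.DateDiff)
    # The abbreviated unit "dd" comes back normalized by map_date_part.
    print(diff.args.get("unit").name)  # expected: DAY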
@@ -143,97 +144,9 @@ def _show_parser(*args: t.Any, **kwargs: t.Any) -> t.Callable[[Snowflake.Parser]
     return _parse


-DATE_PART_MAPPING = {
-    "Y": "YEAR",
-    "YY": "YEAR",
-    "YYY": "YEAR",
-    "YYYY": "YEAR",
-    "YR": "YEAR",
-    "YEARS": "YEAR",
-    "YRS": "YEAR",
-    "MM": "MONTH",
-    "MON": "MONTH",
-    "MONS": "MONTH",
-    "MONTHS": "MONTH",
-    "D": "DAY",
-    "DD": "DAY",
-    "DAYS": "DAY",
-    "DAYOFMONTH": "DAY",
-    "WEEKDAY": "DAYOFWEEK",
-    "DOW": "DAYOFWEEK",
-    "DW": "DAYOFWEEK",
-    "WEEKDAY_ISO": "DAYOFWEEKISO",
-    "DOW_ISO": "DAYOFWEEKISO",
-    "DW_ISO": "DAYOFWEEKISO",
-    "YEARDAY": "DAYOFYEAR",
-    "DOY": "DAYOFYEAR",
-    "DY": "DAYOFYEAR",
-    "W": "WEEK",
-    "WK": "WEEK",
-    "WEEKOFYEAR": "WEEK",
-    "WOY": "WEEK",
-    "WY": "WEEK",
-    "WEEK_ISO": "WEEKISO",
-    "WEEKOFYEARISO": "WEEKISO",
-    "WEEKOFYEAR_ISO": "WEEKISO",
-    "Q": "QUARTER",
-    "QTR": "QUARTER",
-    "QTRS": "QUARTER",
-    "QUARTERS": "QUARTER",
-    "H": "HOUR",
-    "HH": "HOUR",
-    "HR": "HOUR",
-    "HOURS": "HOUR",
-    "HRS": "HOUR",
-    "M": "MINUTE",
-    "MI": "MINUTE",
-    "MIN": "MINUTE",
-    "MINUTES": "MINUTE",
-    "MINS": "MINUTE",
-    "S": "SECOND",
-    "SEC": "SECOND",
-    "SECONDS": "SECOND",
-    "SECS": "SECOND",
-    "MS": "MILLISECOND",
-    "MSEC": "MILLISECOND",
-    "MILLISECONDS": "MILLISECOND",
-    "US": "MICROSECOND",
-    "USEC": "MICROSECOND",
-    "MICROSECONDS": "MICROSECOND",
-    "NS": "NANOSECOND",
-    "NSEC": "NANOSECOND",
-    "NANOSEC": "NANOSECOND",
-    "NSECOND": "NANOSECOND",
-    "NSECONDS": "NANOSECOND",
-    "NANOSECS": "NANOSECOND",
-    "EPOCH": "EPOCH_SECOND",
-    "EPOCH_SECONDS": "EPOCH_SECOND",
-    "EPOCH_MILLISECONDS": "EPOCH_MILLISECOND",
-    "EPOCH_MICROSECONDS": "EPOCH_MICROSECOND",
-    "EPOCH_NANOSECONDS": "EPOCH_NANOSECOND",
-    "TZH": "TIMEZONE_HOUR",
-    "TZM": "TIMEZONE_MINUTE",
-}
-
-
-@t.overload
-def _map_date_part(part: exp.Expression) -> exp.Var:
-    pass
-
-
-@t.overload
-def _map_date_part(part: t.Optional[exp.Expression]) -> t.Optional[exp.Expression]:
-    pass
-
-
-def _map_date_part(part):
-    mapped = DATE_PART_MAPPING.get(part.name.upper()) if part else None
-    return exp.var(mapped) if mapped else part
-
-
 def _date_trunc_to_time(args: t.List) -> exp.DateTrunc | exp.TimestampTrunc:
     trunc = date_trunc_to_time(args)
-    trunc.set("unit", _map_date_part(trunc.args["unit"]))
+    trunc.set("unit", map_date_part(trunc.args["unit"]))
     return trunc

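The date-part table and its `_map_date_part` helper did not disappear; they moved to sqlglot.dialects.dialect so other dialects can share them, which is what the new map_date_part import at the top of this diff picks up. A minimal sketch of the shared helper's behavior, assuming only what the removed code shows:

    from sqlglot import exp
    from sqlglot.dialects.dialect import map_date_part

    print(map_date_part(exp.var("YY")).name)   # YEAR
    print(map_date_part(exp.var("WOY")).name)  # WEEK

    # Unmapped parts pass through unchanged; None stays None.
    print(map_date_part(exp.var("CENTURY")).name)  # CENTURY
    print(map_date_part(None))                     # None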
@@ -328,7 +241,7 @@ class Snowflake(Dialect):
     class Parser(parser.Parser):
         IDENTIFY_PIVOT_STRINGS = True
         DEFAULT_SAMPLING_METHOD = "BERNOULLI"
-        COLON_IS_JSON_EXTRACT = True
+        COLON_IS_VARIANT_EXTRACT = True

         ID_VAR_TOKENS = {
             *parser.Parser.ID_VAR_TOKENS,
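The flag rename reflects that Snowflake's `:` operator extracts from VARIANT values rather than performing generic JSON extraction. A quick illustrative check (the query shape is assumed and the rendered output may differ across sqlglot versions):

    import sqlglot

    sql = "SELECT payload:order.items[0].sku FROM events"
    print(sqlglot.transpile(sql, read="snowflake", write="snowflake")[0])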
@@ -367,8 +280,10 @@ class Snowflake(Dialect):
             ),
             "IFF": exp.If.from_arg_list,
             "LAST_DAY": lambda args: exp.LastDay(
-                this=seq_get(args, 0), unit=_map_date_part(seq_get(args, 1))
+                this=seq_get(args, 0), unit=map_date_part(seq_get(args, 1))
             ),
+            "LEN": lambda args: exp.Length(this=seq_get(args, 0), binary=True),
+            "LENGTH": lambda args: exp.Length(this=seq_get(args, 0), binary=True),
             "LISTAGG": exp.GroupConcat.from_arg_list,
             "MEDIAN": lambda args: exp.PercentileCont(
                 this=seq_get(args, 0), expression=exp.Literal.number(0.5)
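The new LEN/LENGTH entries mark the parsed node with binary=True, recording that Snowflake's length functions accept BINARY input as well as text. A minimal sketch (table and column names are placeholders):

    import sqlglot
    from sqlglot import exp

    length = sqlglot.parse_one("SELECT LEN(c) FROM t", read="snowflake").find(exp.Length)
    print(length.args.get("binary"))  # True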
@@ -385,6 +300,7 @@ class Snowflake(Dialect):
             "TIMESTAMPDIFF": _build_datediff,
             "TIMESTAMPFROMPARTS": build_timestamp_from_parts,
             "TIMESTAMP_FROM_PARTS": build_timestamp_from_parts,
+            "TRY_PARSE_JSON": lambda args: exp.ParseJSON(this=seq_get(args, 0), safe=True),
             "TRY_TO_DATE": _build_datetime("TRY_TO_DATE", exp.DataType.Type.DATE, safe=True),
             "TO_DATE": _build_datetime("TO_DATE", exp.DataType.Type.DATE),
             "TO_NUMBER": lambda args: exp.ToNumber(
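TRY_PARSE_JSON now parses into exp.ParseJSON with safe=True instead of falling through as an anonymous function; the generator half of this change appears further down in the diff. Illustrative check (names invented):

    import sqlglot
    from sqlglot import exp

    node = sqlglot.parse_one("SELECT TRY_PARSE_JSON(raw) FROM t", read="snowflake")
    print(node.find(exp.ParseJSON).args.get("safe"))  # True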
@@ -541,7 +457,7 @@ class Snowflake(Dialect):

             self._match(TokenType.COMMA)
             expression = self._parse_bitwise()
-            this = _map_date_part(this)
+            this = map_date_part(this)
             name = this.name.upper()

             if name.startswith("EPOCH"):
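Here the rename lands inside the date-part parser, so DATE_PART/EXTRACT arguments get the same normalization before the EPOCH special-casing runs. A hedged sketch (the exact rendered SQL is version-dependent and not asserted):

    import sqlglot

    ast = sqlglot.parse_one(
        "SELECT DATE_PART(epoch_seconds, created_at) FROM t", read="snowflake"
    )
    print(ast.sql(dialect="snowflake"))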
@@ -588,10 +504,11 @@ class Snowflake(Dialect):

             return lateral

-        def _parse_at_before(self, table: exp.Table) -> exp.Table:
+        def _parse_historical_data(self) -> t.Optional[exp.HistoricalData]:
             # https://docs.snowflake.com/en/sql-reference/constructs/at-before
             index = self._index
-            if self._match_texts(("AT", "BEFORE")):
+            historical_data = None
+            if self._match_texts(self.HISTORICAL_DATA_PREFIX):
                 this = self._prev.text.upper()
                 kind = (
                     self._match(TokenType.L_PAREN)
@@ -602,14 +519,27 @@ class Snowflake(Dialect):

                 if expression:
                     self._match_r_paren()
-                    when = self.expression(
+                    historical_data = self.expression(
                         exp.HistoricalData, this=this, kind=kind, expression=expression
                     )
-                    table.set("when", when)
                 else:
                     self._retreat(index)

-            return table
+            return historical_data
+
+        def _parse_changes(self) -> t.Optional[exp.Changes]:
+            if not self._match_text_seq("CHANGES", "(", "INFORMATION", "=>"):
+                return None
+
+            information = self._parse_var(any_token=True)
+            self._match_r_paren()
+
+            return self.expression(
+                exp.Changes,
+                information=information,
+                at_before=self._parse_historical_data(),
+                end=self._parse_historical_data(),
+            )

         def _parse_table_parts(
             self, schema: bool = False, is_db_reference: bool = False, wildcard: bool = False
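_parse_historical_data now returns a free-standing exp.HistoricalData node instead of mutating the table, which is what lets _parse_changes call it twice: once for the AT/BEFORE bound and once for END. A sketch of the new surface area (the table name and timestamps are invented for illustration):

    import sqlglot
    from sqlglot import exp

    sql = """
        SELECT * FROM orders
        CHANGES (INFORMATION => DEFAULT)
        AT (TIMESTAMP => '2024-01-01'::TIMESTAMP)
        END (TIMESTAMP => '2024-01-02'::TIMESTAMP)
    """
    table = sqlglot.parse_one(sql, read="snowflake").find(exp.Table)
    changes = table.args["changes"]
    print(changes.args["at_before"])  # exp.HistoricalData for the AT bound
    print(changes.args["end"])        # exp.HistoricalData for the END bound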
@@ -643,7 +573,15 @@ class Snowflake(Dialect):
             else:
                 table = super()._parse_table_parts(schema=schema, is_db_reference=is_db_reference)

-            return self._parse_at_before(table)
+            changes = self._parse_changes()
+            if changes:
+                table.set("changes", changes)
+
+            at_before = self._parse_historical_data()
+            if at_before:
+                table.set("when", at_before)
+
+            return table

         def _parse_id_var(
             self,
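Plain time-travel clauses keep their old home: the historical-data node is still stored under the table's "when" arg, so downstream consumers of AT/BEFORE are unaffected. Sketch (query invented):

    import sqlglot
    from sqlglot import exp

    table = sqlglot.parse_one(
        "SELECT * FROM orders AT (OFFSET => -300)", read="snowflake"
    ).find(exp.Table)
    print(table.args.get("when"))  # exp.HistoricalData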
@@ -771,6 +709,7 @@ class Snowflake(Dialect):
             "WAREHOUSE": TokenType.WAREHOUSE,
+            "STREAMLIT": TokenType.STREAMLIT,
         }
         KEYWORDS.pop("/*+")

         SINGLE_TOKENS = {
             **tokens.Tokenizer.SINGLE_TOKENS,
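STREAMLIT joins the keyword table, presumably in support of CREATE STREAMLIT statements. A minimal way to observe the tokenizer change (the statement fragment is chosen only to exercise the keyword):

    from sqlglot.dialects.snowflake import Snowflake

    for token in Snowflake().tokenize("CREATE STREAMLIT my_app"):
        print(token.token_type, token.text)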
@@ -839,6 +778,9 @@ class Snowflake(Dialect):
             exp.Map: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
             exp.Max: max_or_greatest,
             exp.Min: min_or_least,
+            exp.ParseJSON: lambda self, e: self.func(
+                "TRY_PARSE_JSON" if e.args.get("safe") else "PARSE_JSON", e.this
+            ),
             exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
             exp.PercentileCont: transforms.preprocess(
                 [transforms.add_within_group_for_percentiles]
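And this is the generator half: exp.ParseJSON with safe set renders as TRY_PARSE_JSON, otherwise PARSE_JSON, so the parser change above round-trips. Illustrative (column name invented):

    import sqlglot

    print(sqlglot.transpile("SELECT TRY_PARSE_JSON(raw)", read="snowflake", write="snowflake")[0])
    # SELECT TRY_PARSE_JSON(raw)
    print(sqlglot.transpile("SELECT PARSE_JSON(raw)", read="snowflake", write="snowflake")[0])
    # SELECT PARSE_JSON(raw)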