Adding upstream version 24.0.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent
b6ae88ec81
commit
8b1190270c
127 changed files with 40727 additions and 46460 deletions
|
@ -336,6 +336,7 @@ class Snowflake(Dialect):
|
|||
class Parser(parser.Parser):
|
||||
# Snowflake treats single-quoted strings in PIVOT aliases as identifiers.
IDENTIFY_PIVOT_STRINGS = True

# Sampling method used when TABLESAMPLE does not specify one
# (Snowflake's default is row-wise BERNOULLI sampling).
DEFAULT_SAMPLING_METHOD = "BERNOULLI"

# A ':' following a column is Snowflake's semi-structured (JSON)
# extraction operator, not a slice/variable marker.
COLON_IS_JSON_EXTRACT = True
|
||||
|
||||
ID_VAR_TOKENS = {
|
||||
*parser.Parser.ID_VAR_TOKENS,
|
||||
|
@ -482,44 +483,28 @@ class Snowflake(Dialect):
|
|||
|
||||
# SHOW <kind> targets that enumerate schema-level object collections.
SCHEMA_KINDS = {"OBJECTS", "TABLES", "VIEWS", "SEQUENCES", "UNIQUE KEYS", "IMPORTED KEYS"}

# CREATE <kind> statements whose target is a plain identifier rather than
# a table reference; see _parse_create, which unwraps the Table node.
NON_TABLE_CREATABLES = {"STORAGE INTEGRATION", "TAG", "WAREHOUSE", "STREAMLIT"}

LAMBDAS = {
    **parser.Parser.LAMBDAS,
    TokenType.ARROW: lambda self, expressions: self.expression(
        exp.Lambda,
        this=self._replace_lambda(
            self._parse_conjunction(),
            expressions,
        ),
        # Typed lambda parameters (e.g. `x INT -> ...`) are parsed as Casts;
        # keep only the underlying identifier as the parameter.
        expressions=[e.this if isinstance(e, exp.Cast) else e for e in expressions],
    ),
}
|
||||
|
||||
def _parse_create(self) -> exp.Create | exp.Command:
    """Parse a CREATE statement, unwrapping non-table creatable targets.

    For kinds in ``NON_TABLE_CREATABLES`` (e.g. TAG, WAREHOUSE) the parsed
    target arrives as a Table node wrapping a bare Identifier; the Table
    wrapper is replaced by that Identifier so the name is kept as-is.

    Returns:
        The parsed ``exp.Create`` (possibly adjusted) or an ``exp.Command``
        fallback from the superclass parser.
    """
    expression = super()._parse_create()

    if isinstance(expression, exp.Create) and expression.kind in self.NON_TABLE_CREATABLES:
        # Replace the Table node with the enclosed Identifier
        expression.this.replace(expression.this.this)

    return expression
|
||||
|
||||
def _parse_column_ops(self, this: t.Optional[exp.Expression]) -> t.Optional[exp.Expression]:
    """Parse Snowflake's ':' JSON-extraction chains after a column.

    Consecutive ``:`` segments are collected into a single dotted JSON
    path and wrapped in an ``exp.JSONExtract``. Casts attached to path
    segments via ``::`` are peeled off and re-applied on top of the
    extraction, since ``::`` binds more loosely than ``:``.

    Args:
        this: the expression parsed so far (may be None).

    Returns:
        The (possibly wrapped) expression, or None if ``this`` was None
        and nothing was parsed.
    """
    this = super()._parse_column_ops(this)

    casts = []
    json_path = []

    while self._match(TokenType.COLON):
        path = super()._parse_column_ops(self._parse_field(any_token=True))

        # The cast :: operator has a lower precedence than the extraction operator :, so
        # we rearrange the AST appropriately to avoid casting the 2nd argument of GET_PATH
        while isinstance(path, exp.Cast):
            casts.append(path.to)
            path = path.this

        if path:
            json_path.append(path.sql(dialect="snowflake", copy=False))

    if json_path:
        this = self.expression(
            exp.JSONExtract,
            this=this,
            expression=self.dialect.to_json_path(exp.Literal.string(".".join(json_path))),
        )

        # NOTE(review): source indentation was lost in extraction; the cast
        # re-application is placed inside the json_path branch, where the
        # collected casts are meaningful — confirm against upstream.
        while casts:
            this = self.expression(exp.Cast, this=this, to=casts.pop())

    return this
|
||||
|
||||
# https://docs.snowflake.com/en/sql-reference/functions/date_part.html
|
||||
# https://docs.snowflake.com/en/sql-reference/functions-date-time.html#label-supported-date-time-parts
|
||||
def _parse_date_part(self: Snowflake.Parser) -> t.Optional[exp.Expression]:
|
||||
|
@ -716,6 +701,19 @@ class Snowflake(Dialect):
|
|||
|
||||
return exp.var("".join(part.text for part in parts if part))
|
||||
|
||||
def _parse_lambda_arg(self) -> t.Optional[exp.Expression]:
    """Parse a lambda parameter, allowing an optional trailing type.

    Snowflake lambdas may declare typed parameters (e.g. ``x INT``);
    when a type follows the argument, the argument is wrapped in an
    ``exp.Cast`` carrying that type (later stripped by LAMBDAS).

    Returns:
        The parsed argument (possibly cast), or None if no argument
        was parsed.
    """
    this = super()._parse_lambda_arg()

    if not this:
        return this

    typ = self._parse_types()

    if typ:
        return self.expression(exp.Cast, this=this, to=typ)

    return this
|
||||
|
||||
class Tokenizer(tokens.Tokenizer):
|
||||
STRING_ESCAPES = ["\\", "'"]
|
||||
HEX_STRINGS = [("x'", "'"), ("X'", "'")]
|
||||
|
@ -744,6 +742,8 @@ class Snowflake(Dialect):
|
|||
"TAG": TokenType.TAG,
|
||||
"TIMESTAMP_TZ": TokenType.TIMESTAMPTZ,
|
||||
"TOP": TokenType.TOP,
|
||||
"WAREHOUSE": TokenType.WAREHOUSE,
|
||||
"STREAMLIT": TokenType.STREAMLIT,
|
||||
}
|
||||
|
||||
SINGLE_TOKENS = {
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue