"""Supports BigQuery Standard SQL."""

from __future__ import annotations

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    datestrtodate_sql,
    inline_array_sql,
    no_ilike_sql,
    rename_func,
    timestrtotime_sql,
)
from sqlglot.helper import seq_get
from sqlglot.tokens import TokenType


def _date_add(expression_class):
    # Shared parser for BigQuery's <TYPE>_ADD / <TYPE>_SUB functions: unwrap
    # the INTERVAL argument into the expression/unit args of the target node.
    def func(args):
        interval = seq_get(args, 1)
        return expression_class(
            this=seq_get(args, 0),
            expression=interval.this,
            unit=interval.args.get("unit"),
        )

    return func
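
# A sketch of the round trip these parsers enable (exact output can vary by
# sqlglot version):
#
#   import sqlglot
#   sqlglot.transpile("SELECT DATE_ADD(x, INTERVAL 1 DAY)", read="bigquery")[0]
#   # 'SELECT DATE_ADD(x, INTERVAL 1 DAY)'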


def _date_trunc(args):
    unit = seq_get(args, 1)
    # The date part (e.g. DAY) parses as a bare column reference; normalize it
    # to a Var so it round-trips without quoting.
    if isinstance(unit, exp.Column):
        unit = exp.Var(this=unit.name)
    return exp.DateTrunc(this=seq_get(args, 0), expression=unit)
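
# E.g. DATE_TRUNC(d, MONTH) becomes DateTrunc(this=d, expression=Var(MONTH))
# rather than treating MONTH as a column (reprs are illustrative).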


def _date_add_sql(data_type, kind):
    # Generator counterpart of _date_add: render the AST node back into
    # BigQuery's <TYPE>_<KIND>(value, INTERVAL n unit) syntax.
    def func(self, expression):
        this = self.sql(expression, "this")
        unit = self.sql(expression, "unit") or "'day'"
        expression = self.sql(expression, "expression")
        return f"{data_type}_{kind}({this}, INTERVAL {expression} {unit})"

    return func
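
# For instance, _date_add_sql("DATE", "SUB") renders exp.DateSub(this=x,
# expression=7, unit=DAY) as "DATE_SUB(x, INTERVAL 7 DAY)"; when no unit was
# parsed it falls back to 'day'.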


def _derived_table_values_to_unnest(self, expression):
    # BigQuery doesn't support VALUES as a derived table. When VALUES appears
    # directly under FROM, rewrite it as UNNEST of an array of structs whose
    # fields carry the column aliases; other VALUES (e.g. INSERT) pass through.
    if not isinstance(expression.unnest().parent, exp.From):
        expression = transforms.remove_precision_parameterized_types(expression)
        return self.values_sql(expression)
    rows = [tuple_exp.expressions for tuple_exp in expression.find_all(exp.Tuple)]
    structs = []
    for row in rows:
        aliases = [
            exp.alias_(value, column_name)
            for value, column_name in zip(row, expression.args["alias"].args["columns"])
        ]
        structs.append(exp.Struct(expressions=aliases))
    unnest_exp = exp.Unnest(expressions=[exp.Array(expressions=structs)])
    return self.unnest_sql(unnest_exp)
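
# Sketch of the rewrite: "SELECT * FROM (VALUES (1, 'a')) AS t(x, y)" is
# expected to generate as "SELECT * FROM UNNEST([STRUCT(1 AS x, 'a' AS y)])".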


def _returnsproperty_sql(self, expression):
    this = expression.this
    if isinstance(this, exp.Schema):
        this = f"{this.this} <{self.expressions(this)}>"
    else:
        this = self.sql(this)
    return f"RETURNS {this}"
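
# E.g. a table-valued signature renders as "RETURNS TABLE <x INT64, y STRING>"
# while a scalar one renders as "RETURNS INT64" (a sketch of expected output).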


def _create_sql(self, expression):
    kind = expression.args.get("kind")
    returns = expression.find(exp.ReturnsProperty)
    if kind.upper() == "FUNCTION" and returns and returns.args.get("is_table"):
        # BigQuery spells table-valued functions as CREATE TABLE FUNCTION and
        # takes the body bare, not wrapped in a subquery or string literal.
        expression = expression.copy()
        expression.set("kind", "TABLE FUNCTION")
        if isinstance(expression.expression, (exp.Subquery, exp.Literal)):
            expression.set("expression", expression.expression.this)

    return self.create_sql(expression)
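
# Sketch: "CREATE FUNCTION f() RETURNS TABLE <x INT64> AS (SELECT 1)" should
# come out as "CREATE TABLE FUNCTION f() RETURNS TABLE <x INT64> AS SELECT 1".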


class BigQuery(Dialect):
    unnest_column_only = True

    time_mapping = {
        "%M": "%-M",
        "%d": "%-d",
        "%m": "%-m",
        "%y": "%-y",
        "%H": "%-H",
        "%I": "%-I",
        "%S": "%-S",
        "%j": "%-j",
    }
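
    # A sketch of how this is used: a BigQuery format string like '%m/%d/%y'
    # is translated token-by-token to '%-m/%-d/%-y' (the non-padded variants)
    # when mapped onto the dialect's internal time format.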

    class Tokenizer(tokens.Tokenizer):
        QUOTES = [
            (prefix + quote, quote) if prefix else quote
            for quote in ["'", '"', '"""', "'''"]
            for prefix in ["", "r", "R"]
        ]
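
        # The comprehension above yields the bare quotes plus raw-string
        # variants, e.g. ("r'", "'") and ('R"""', '"""'), so r'...' and
        # R"""...""" tokenize as strings.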

        COMMENTS = ["--", "#", ("/*", "*/")]
        IDENTIFIERS = ["`"]
        ESCAPES = ["\\"]
        HEX_STRINGS = [("0x", ""), ("0X", "")]

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "BEGIN": TokenType.COMMAND,
            "BEGIN TRANSACTION": TokenType.BEGIN,
            "CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
            "CURRENT_TIME": TokenType.CURRENT_TIME,
            "DECLARE": TokenType.COMMAND,
            "GEOGRAPHY": TokenType.GEOGRAPHY,
            "FLOAT64": TokenType.DOUBLE,
            "INT64": TokenType.BIGINT,
            "NOT DETERMINISTIC": TokenType.VOLATILE,
            "UNKNOWN": TokenType.NULL,
        }
        # DIV is a function in BigQuery (parsed via Parser.FUNCTIONS below),
        # not an infix keyword.
        KEYWORDS.pop("DIV")

    class Parser(parser.Parser):
        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "DATE_TRUNC": _date_trunc,
            "DATE_ADD": _date_add(exp.DateAdd),
            "DATETIME_ADD": _date_add(exp.DatetimeAdd),
            "DIV": lambda args: exp.IntDiv(this=seq_get(args, 0), expression=seq_get(args, 1)),
            "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list,
            "TIME_ADD": _date_add(exp.TimeAdd),
            "TIMESTAMP_ADD": _date_add(exp.TimestampAdd),
            "DATE_SUB": _date_add(exp.DateSub),
            "DATETIME_SUB": _date_add(exp.DatetimeSub),
            "TIME_SUB": _date_add(exp.TimeSub),
            "TIMESTAMP_SUB": _date_add(exp.TimestampSub),
            "PARSE_TIMESTAMP": lambda args: exp.StrToTime(
                this=seq_get(args, 1), format=seq_get(args, 0)
            ),
        }
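
        # Note the argument swap for PARSE_TIMESTAMP: BigQuery takes the format
        # first, so PARSE_TIMESTAMP('%Y-%m-%d', x) parses to roughly
        # StrToTime(this=x, format='%Y-%m-%d'), matching the generator below
        # (reprs are illustrative).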

        FUNCTION_PARSERS = {
            **parser.Parser.FUNCTION_PARSERS,  # type: ignore
            "ARRAY": lambda self: self.expression(exp.Array, expressions=[self._parse_statement()]),
        }
        # BigQuery's two-argument TRIM needs no special parsing; drop the
        # inherited TRIM(BOTH x FROM y) parser and treat it as a plain function.
        FUNCTION_PARSERS.pop("TRIM")

        NO_PAREN_FUNCTIONS = {
            **parser.Parser.NO_PAREN_FUNCTIONS,  # type: ignore
            TokenType.CURRENT_DATETIME: exp.CurrentDatetime,
            TokenType.CURRENT_TIME: exp.CurrentTime,
        }

        NESTED_TYPE_TOKENS = {
            *parser.Parser.NESTED_TYPE_TOKENS,  # type: ignore
            TokenType.TABLE,
        }

        ID_VAR_TOKENS = {
            *parser.Parser.ID_VAR_TOKENS,  # type: ignore
            TokenType.VALUES,
        }

    class Generator(generator.Generator):
        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            **transforms.REMOVE_PRECISION_PARAMETERIZED_TYPES,  # type: ignore
            exp.ArraySize: rename_func("ARRAY_LENGTH"),
            exp.DateAdd: _date_add_sql("DATE", "ADD"),
            exp.DateSub: _date_add_sql("DATE", "SUB"),
            exp.DatetimeAdd: _date_add_sql("DATETIME", "ADD"),
            exp.DatetimeSub: _date_add_sql("DATETIME", "SUB"),
            exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
            exp.DateStrToDate: datestrtodate_sql,
            exp.GroupConcat: rename_func("STRING_AGG"),
            exp.ILike: no_ilike_sql,
            exp.IntDiv: rename_func("DIV"),
            exp.StrToTime: lambda self, e: f"PARSE_TIMESTAMP({self.format_time(e)}, {self.sql(e, 'this')})",
            exp.TimeAdd: _date_add_sql("TIME", "ADD"),
            exp.TimeSub: _date_add_sql("TIME", "SUB"),
            exp.TimestampAdd: _date_add_sql("TIMESTAMP", "ADD"),
            exp.TimestampSub: _date_add_sql("TIMESTAMP", "SUB"),
            exp.TimeStrToTime: timestrtotime_sql,
            exp.VariancePop: rename_func("VAR_POP"),
            exp.Values: _derived_table_values_to_unnest,
            exp.ReturnsProperty: _returnsproperty_sql,
            exp.Create: _create_sql,
            exp.Trim: lambda self, e: f"TRIM({self.format_args(e.this, e.expression)})",
            exp.VolatilityProperty: lambda self, e: "DETERMINISTIC"
            if e.name == "IMMUTABLE"
            else "NOT DETERMINISTIC",
            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
        }
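
        # These pair with Parser.FUNCTIONS above for lossless round trips; a
        # sketch:
        #
        #   sqlglot.transpile("SELECT DIV(x, y)", read="bigquery")[0]
        #   # 'SELECT DIV(x, y)'  (parsed as exp.IntDiv, rendered back as DIV)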

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,  # type: ignore
            exp.DataType.Type.TINYINT: "INT64",
            exp.DataType.Type.SMALLINT: "INT64",
            exp.DataType.Type.INT: "INT64",
            exp.DataType.Type.BIGINT: "INT64",
            exp.DataType.Type.DECIMAL: "NUMERIC",
            exp.DataType.Type.FLOAT: "FLOAT64",
            exp.DataType.Type.DOUBLE: "FLOAT64",
            exp.DataType.Type.BOOLEAN: "BOOL",
            exp.DataType.Type.TEXT: "STRING",
            exp.DataType.Type.VARCHAR: "STRING",
            exp.DataType.Type.NVARCHAR: "STRING",
        }
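
        # Combined with REMOVE_PRECISION_PARAMETERIZED_TYPES above, e.g.
        # "CAST(x AS VARCHAR(10))" should generate as "CAST(x AS STRING)":
        # the precision is dropped and VARCHAR maps to STRING.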

        # Properties rendered inline on CREATE (e.g. LANGUAGE js, RETURNS ...,
        # DETERMINISTIC) rather than in the WITH properties block.
        ROOT_PROPERTIES = {
            exp.LanguageProperty,
            exp.ReturnsProperty,
            exp.VolatilityProperty,
        }

        WITH_PROPERTIES = {exp.Property}

        # Always spell out DISTINCT or ALL on set operations.
        EXPLICIT_UNION = True

        def array_sql(self, expression: exp.Array) -> str:
            first_arg = seq_get(expression.expressions, 0)
            if isinstance(first_arg, exp.Subqueryable):
                # ARRAY(SELECT ...) must keep its subquery form.
                return f"ARRAY{self.wrap(self.sql(first_arg))}"

            return inline_array_sql(self, expression)
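
        # E.g. ARRAY(SELECT x FROM t) stays as-is, while ARRAY(1, 2, 3) is
        # inlined to [1, 2, 3] by inline_array_sql.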

        def transaction_sql(self, *_) -> str:
            return "BEGIN TRANSACTION"

        def commit_sql(self, *_) -> str:
            return "COMMIT TRANSACTION"

        def rollback_sql(self, *_) -> str:
            return "ROLLBACK TRANSACTION"

        def in_unnest_op(self, expression: exp.Unnest) -> str:
            # BigQuery supports `x IN UNNEST(arr)` natively, so emit the UNNEST
            # bare rather than wrapping it like a subquery.
            return self.sql(expression)

        def except_op(self, expression: exp.Except) -> str:
            if not expression.args.get("distinct", False):
                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"

        def intersect_op(self, expression: exp.Intersect) -> str:
            if not expression.args.get("distinct", False):
                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
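
        # E.g. "a EXCEPT DISTINCT b" round-trips unchanged, while "a EXCEPT ALL
        # b" (from a dialect that allows it) triggers the unsupported warning
        # and is emitted as-is, since BigQuery has no equivalent.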