Adding upstream version 15.0.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent 70d5d3451a
commit bb75596aa9
167 changed files with 58268 additions and 51337 deletions
sqlglot/dialects/duckdb.py

@@ -11,9 +11,9 @@ from sqlglot.dialects.dialect import (
     datestrtodate_sql,
     format_time_lambda,
     no_comment_column_constraint_sql,
-    no_pivot_sql,
     no_properties_sql,
     no_safe_divide_sql,
+    pivot_column_names,
     rename_func,
     str_position_sql,
     str_to_time_sql,
@@ -31,10 +31,11 @@ def _ts_or_ds_add_sql(self: generator.Generator, expression: exp.TsOrDsAdd) -> str:
     return f"CAST({this} AS DATE) + {self.sql(exp.Interval(this=expression.expression, unit=unit))}"
 
 
-def _date_add_sql(self: generator.Generator, expression: exp.DateAdd) -> str:
+def _date_delta_sql(self: generator.Generator, expression: exp.DateAdd | exp.DateSub) -> str:
     this = self.sql(expression, "this")
     unit = self.sql(expression, "unit").strip("'") or "DAY"
-    return f"{this} + {self.sql(exp.Interval(this=expression.expression, unit=unit))}"
+    op = "+" if isinstance(expression, exp.DateAdd) else "-"
+    return f"{this} {op} {self.sql(exp.Interval(this=expression.expression, unit=unit))}"
 
 
 def _array_sort_sql(self: generator.Generator, expression: exp.ArraySort) -> str:
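Reviewer note: the renamed helper now serves both exp.DateAdd and exp.DateSub, picking "+" or "-" from the node type. A minimal sketch of the resulting behavior through sqlglot's public transpile API (expected outputs are illustrative, not verified against this exact build):

import sqlglot

# DATE_ADD and DATE_SUB share one generator helper after this change;
# the operator is chosen from the expression type.
print(sqlglot.transpile("DATE_ADD(d, INTERVAL 1 DAY)", read="mysql", write="duckdb")[0])
# e.g. d + INTERVAL '1' day
print(sqlglot.transpile("DATE_SUB(d, INTERVAL 1 DAY)", read="mysql", write="duckdb")[0])
# e.g. d - INTERVAL '1' day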
@@ -50,11 +51,11 @@ def _sort_array_sql(self: generator.Generator, expression: exp.SortArray) -> str:
     return f"ARRAY_SORT({this})"
 
 
-def _sort_array_reverse(args: t.Sequence) -> exp.Expression:
+def _sort_array_reverse(args: t.List) -> exp.Expression:
     return exp.SortArray(this=seq_get(args, 0), asc=exp.false())
 
 
-def _parse_date_diff(args: t.Sequence) -> exp.Expression:
+def _parse_date_diff(args: t.List) -> exp.Expression:
     return exp.DateDiff(
         this=seq_get(args, 2),
         expression=seq_get(args, 1),
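Reviewer note: the t.Sequence to t.List change only tightens the argument annotations to match the base parser's function signatures; behavior is unchanged. A quick sketch of what _sort_array_reverse builds, assuming sqlglot's public parse_one API:

import sqlglot
from sqlglot import exp

# ARRAY_REVERSE_SORT(x) parses into a canonical SortArray node with
# asc=FALSE, which other dialects can then render their own way.
node = sqlglot.parse_one("ARRAY_REVERSE_SORT(x)", read="duckdb")
assert isinstance(node, exp.SortArray)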
@@ -89,11 +90,14 @@ def _regexp_extract_sql(self: generator.Generator, expression: exp.RegexpExtract) -> str:
 
 
 class DuckDB(Dialect):
     null_ordering = "nulls_are_last"
 
     class Tokenizer(tokens.Tokenizer):
         KEYWORDS = {
             **tokens.Tokenizer.KEYWORDS,
             "~": TokenType.RLIKE,
+            ":=": TokenType.EQ,
+            "//": TokenType.DIV,
+            "ATTACH": TokenType.COMMAND,
             "BINARY": TokenType.VARBINARY,
             "BPCHAR": TokenType.TEXT,
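Reviewer note: the new "//" keyword tokenizes DuckDB's integer division, and paired with the exp.IntDiv transform added further down it should round-trip. A hedged sketch; the DIV-token-to-IntDiv mapping is an assumption about the base parser, and the output is illustrative:

import sqlglot
from sqlglot import exp

# "//" tokenizes as integer division and should come back out as "//".
node = sqlglot.parse_one("a // b", read="duckdb")
assert isinstance(node, exp.IntDiv)  # assumes DIV tokens parse to IntDiv
print(node.sql(dialect="duckdb"))    # e.g. a // b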
@@ -104,6 +108,7 @@ class DuckDB(Dialect):
             "INT1": TokenType.TINYINT,
             "LOGICAL": TokenType.BOOLEAN,
             "NUMERIC": TokenType.DOUBLE,
+            "PIVOT_WIDER": TokenType.PIVOT,
             "SIGNED": TokenType.INT,
             "STRING": TokenType.VARCHAR,
             "UBIGINT": TokenType.UBIGINT,
@@ -114,8 +119,7 @@ class DuckDB(Dialect):
 
     class Parser(parser.Parser):
         FUNCTIONS = {
-            **parser.Parser.FUNCTIONS,  # type: ignore
-            "APPROX_COUNT_DISTINCT": exp.ApproxDistinct.from_arg_list,
+            **parser.Parser.FUNCTIONS,
             "ARRAY_LENGTH": exp.ArraySize.from_arg_list,
             "ARRAY_SORT": exp.SortArray.from_arg_list,
             "ARRAY_REVERSE_SORT": _sort_array_reverse,
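Reviewer note: dropping the "# type: ignore" and the APPROX_COUNT_DISTINCT entry (presumably handled upstream of this dialect now, as far as I can tell) leaves only the DuckDB-specific names, which map to canonical nodes so they transpile cleanly to other dialects. A sketch; the output is illustrative:

import sqlglot

# ARRAY_LENGTH parses to the canonical ArraySize node, so it can be
# re-rendered for another dialect.
print(sqlglot.transpile("ARRAY_LENGTH(x)", read="duckdb", write="spark")[0])
# e.g. SIZE(x)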
@@ -152,11 +156,17 @@ class DuckDB(Dialect):
             TokenType.UTINYINT,
         }
 
+        def _pivot_column_names(self, aggregations: t.List[exp.Expression]) -> t.List[str]:
+            if len(aggregations) == 1:
+                return super()._pivot_column_names(aggregations)
+            return pivot_column_names(aggregations, dialect="duckdb")
+
     class Generator(generator.Generator):
+        JOIN_HINTS = False
+        TABLE_HINTS = False
         LIMIT_FETCH = "LIMIT"
         STRUCT_DELIMITER = ("(", ")")
         RENAME_TABLE_WITH_DB = False
 
         TRANSFORMS = {
             **generator.Generator.TRANSFORMS,
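Reviewer note: the new _pivot_column_names hook only kicks in when a pivot has several aggregations, where DuckDB needs explicit output column names; a single aggregation defers to the base parser. A hedged sketch of the user-visible effect, assuming the usual PIVOT shape is accepted by the parser:

import sqlglot

# With PIVOT tokenized (see PIVOT_WIDER above) and column naming handled,
# a DuckDB pivot should now parse rather than error. Illustrative only.
sql = "SELECT * FROM cities PIVOT(SUM(population) FOR year IN (2000, 2010))"
print(sqlglot.parse_one(sql, read="duckdb").sql(dialect="duckdb"))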
@@ -175,7 +185,8 @@ class DuckDB(Dialect):
             exp.DayOfWeek: rename_func("DAYOFWEEK"),
             exp.DayOfYear: rename_func("DAYOFYEAR"),
             exp.DataType: _datatype_sql,
-            exp.DateAdd: _date_add_sql,
+            exp.DateAdd: _date_delta_sql,
+            exp.DateSub: _date_delta_sql,
             exp.DateDiff: lambda self, e: self.func(
                 "DATE_DIFF", f"'{e.args.get('unit', 'day')}'", e.expression, e.this
             ),
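Reviewer note: alongside the _date_delta_sql registrations, the DATE_DIFF lambda quotes the unit and emits it first, with the operands swapped into DuckDB's expected order. A sketch, argument order per the lambda above; the output is illustrative:

import sqlglot

# T-SQL's DATEDIFF(unit, start, end) should come out with the unit quoted
# first and the operands reordered for DuckDB.
print(sqlglot.transpile("DATEDIFF(day, a, b)", read="tsql", write="duckdb")[0])
# e.g. DATE_DIFF('day', a, b)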
@@ -183,13 +194,13 @@ class DuckDB(Dialect):
             exp.DateToDi: lambda self, e: f"CAST(STRFTIME({self.sql(e, 'this')}, {DuckDB.dateint_format}) AS INT)",
             exp.DiToDate: lambda self, e: f"CAST(STRPTIME(CAST({self.sql(e, 'this')} AS TEXT), {DuckDB.dateint_format}) AS DATE)",
             exp.Explode: rename_func("UNNEST"),
+            exp.IntDiv: lambda self, e: self.binary(e, "//"),
             exp.JSONExtract: arrow_json_extract_sql,
             exp.JSONExtractScalar: arrow_json_extract_scalar_sql,
             exp.JSONBExtract: arrow_json_extract_sql,
             exp.JSONBExtractScalar: arrow_json_extract_scalar_sql,
             exp.LogicalOr: rename_func("BOOL_OR"),
             exp.LogicalAnd: rename_func("BOOL_AND"),
-            exp.Pivot: no_pivot_sql,
             exp.Properties: no_properties_sql,
             exp.RegexpExtract: _regexp_extract_sql,
             exp.RegexpLike: rename_func("REGEXP_MATCHES"),
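Reviewer note: JSON and JSONB extraction both reuse the shared arrow helpers, so extraction renders with DuckDB's -> and ->> operators. A sketch via the public API; the output is illustrative:

import sqlglot

# JSON_EXTRACT maps onto DuckDB's arrow operators via the shared helpers.
print(sqlglot.transpile("JSON_EXTRACT(x, '$.y')", read="mysql", write="duckdb")[0])
# e.g. x -> '$.y'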
@@ -232,11 +243,11 @@ class DuckDB(Dialect):
         STAR_MAPPING = {**generator.Generator.STAR_MAPPING, "except": "EXCLUDE"}
 
         PROPERTIES_LOCATION = {
-            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
+            **generator.Generator.PROPERTIES_LOCATION,
             exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
         }
 
         def tablesample_sql(
-            self, expression: exp.TableSample, seed_prefix: str = "SEED", sep=" AS "
+            self, expression: exp.TableSample, seed_prefix: str = "SEED", sep: str = " AS "
         ) -> str:
             return super().tablesample_sql(expression, seed_prefix="REPEATABLE", sep=sep)
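Reviewer note: the sep annotation fix is cosmetic; the substantive behavior in this method is that DuckDB overrides the sampling seed prefix to REPEATABLE. A hedged sketch, assuming a seeded TABLESAMPLE parses from the postgres dialect; input shape and output are illustrative:

import sqlglot

# The sampling seed should render with REPEATABLE(...) for DuckDB.
sql = "SELECT * FROM t TABLESAMPLE BERNOULLI (10) REPEATABLE (42)"
print(sqlglot.transpile(sql, read="postgres", write="duckdb")[0])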