sqlglot.dialects.tsql
1from __future__ import annotations 2 3import datetime 4import re 5import typing as t 6from functools import partial, reduce 7 8from sqlglot import exp, generator, parser, tokens, transforms 9from sqlglot.dialects.dialect import ( 10 Dialect, 11 NormalizationStrategy, 12 any_value_to_max_sql, 13 date_delta_sql, 14 datestrtodate_sql, 15 generatedasidentitycolumnconstraint_sql, 16 max_or_greatest, 17 min_or_least, 18 build_date_delta, 19 rename_func, 20 strposition_sql, 21 trim_sql, 22 timestrtotime_sql, 23) 24from sqlglot.helper import seq_get 25from sqlglot.parser import build_coalesce 26from sqlglot.time import format_time 27from sqlglot.tokens import TokenType 28 29if t.TYPE_CHECKING: 30 from sqlglot._typing import E 31 32FULL_FORMAT_TIME_MAPPING = { 33 "weekday": "%A", 34 "dw": "%A", 35 "w": "%A", 36 "month": "%B", 37 "mm": "%B", 38 "m": "%B", 39} 40 41DATE_DELTA_INTERVAL = { 42 "year": "year", 43 "yyyy": "year", 44 "yy": "year", 45 "quarter": "quarter", 46 "qq": "quarter", 47 "q": "quarter", 48 "month": "month", 49 "mm": "month", 50 "m": "month", 51 "week": "week", 52 "ww": "week", 53 "wk": "week", 54 "day": "day", 55 "dd": "day", 56 "d": "day", 57} 58 59 60DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})") 61 62# N = Numeric, C=Currency 63TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"} 64 65DEFAULT_START_DATE = datetime.date(1900, 1, 1) 66 67BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias} 68 69# Unsupported options: 70# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] ) 71# - TABLE HINT 72OPTIONS: parser.OPTIONS_TYPE = { 73 **dict.fromkeys( 74 ( 75 "DISABLE_OPTIMIZED_PLAN_FORCING", 76 "FAST", 77 "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX", 78 "LABEL", 79 "MAXDOP", 80 "MAXRECURSION", 81 "MAX_GRANT_PERCENT", 82 "MIN_GRANT_PERCENT", 83 "NO_PERFORMANCE_SPOOL", 84 "QUERYTRACEON", 85 "RECOMPILE", 86 ), 87 tuple(), 88 ), 89 "CONCAT": ("UNION",), 90 "DISABLE": ("EXTERNALPUSHDOWN", 
"SCALEOUTEXECUTION"), 91 "EXPAND": ("VIEWS",), 92 "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"), 93 "HASH": ("GROUP", "JOIN", "UNION"), 94 "KEEP": ("PLAN",), 95 "KEEPFIXED": ("PLAN",), 96 "LOOP": ("JOIN",), 97 "MERGE": ("JOIN", "UNION"), 98 "OPTIMIZE": (("FOR", "UNKNOWN"),), 99 "ORDER": ("GROUP",), 100 "PARAMETERIZATION": ("FORCED", "SIMPLE"), 101 "ROBUST": ("PLAN",), 102 "USE": ("PLAN",), 103} 104 105OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL") 106 107 108def _build_formatted_time( 109 exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None 110) -> t.Callable[[t.List], E]: 111 def _builder(args: t.List) -> E: 112 assert len(args) == 2 113 114 return exp_class( 115 this=exp.cast(args[1], exp.DataType.Type.DATETIME2), 116 format=exp.Literal.string( 117 format_time( 118 args[0].name.lower(), 119 ( 120 {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING} 121 if full_format_mapping 122 else TSQL.TIME_MAPPING 123 ), 124 ) 125 ), 126 ) 127 128 return _builder 129 130 131def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr: 132 this = seq_get(args, 0) 133 fmt = seq_get(args, 1) 134 culture = seq_get(args, 2) 135 136 number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name)) 137 138 if number_fmt: 139 return exp.NumberToStr(this=this, format=fmt, culture=culture) 140 141 if fmt: 142 fmt = exp.Literal.string( 143 format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING) 144 if len(fmt.name) == 1 145 else format_time(fmt.name, TSQL.TIME_MAPPING) 146 ) 147 148 return exp.TimeToStr(this=this, format=fmt, culture=culture) 149 150 151def _build_eomonth(args: t.List) -> exp.LastDay: 152 date = exp.TsOrDsToDate(this=seq_get(args, 0)) 153 month_lag = seq_get(args, 1) 154 155 if month_lag is None: 156 this: exp.Expression = date 157 else: 158 unit = DATE_DELTA_INTERVAL.get("month") 159 this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit)) 160 161 return 
exp.LastDay(this=this) 162 163 164def _build_hashbytes(args: t.List) -> exp.Expression: 165 kind, data = args 166 kind = kind.name.upper() if kind.is_string else "" 167 168 if kind == "MD5": 169 args.pop(0) 170 return exp.MD5(this=data) 171 if kind in ("SHA", "SHA1"): 172 args.pop(0) 173 return exp.SHA(this=data) 174 if kind == "SHA2_256": 175 return exp.SHA2(this=data, length=exp.Literal.number(256)) 176 if kind == "SHA2_512": 177 return exp.SHA2(this=data, length=exp.Literal.number(512)) 178 179 return exp.func("HASHBYTES", *args) 180 181 182DATEPART_ONLY_FORMATS = {"DW", "WK", "HOUR", "QUARTER"} 183 184 185def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str: 186 fmt = expression.args["format"] 187 188 if not isinstance(expression, exp.NumberToStr): 189 if fmt.is_string: 190 mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING) 191 192 name = (mapped_fmt or "").upper() 193 if name in DATEPART_ONLY_FORMATS: 194 return self.func("DATEPART", name, expression.this) 195 196 fmt_sql = self.sql(exp.Literal.string(mapped_fmt)) 197 else: 198 fmt_sql = self.format_time(expression) or self.sql(fmt) 199 else: 200 fmt_sql = self.sql(fmt) 201 202 return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture")) 203 204 205def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str: 206 this = expression.this 207 distinct = expression.find(exp.Distinct) 208 if distinct: 209 # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression 210 self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.") 211 this = distinct.pop().expressions[0] 212 213 order = "" 214 if isinstance(expression.this, exp.Order): 215 if expression.this.this: 216 this = expression.this.this.pop() 217 # Order has a leading space 218 order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})" 219 220 separator = expression.args.get("separator") or exp.Literal.string(",") 221 return 
f"STRING_AGG({self.format_args(this, separator)}){order}" 222 223 224def _build_date_delta( 225 exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None 226) -> t.Callable[[t.List], E]: 227 def _builder(args: t.List) -> E: 228 unit = seq_get(args, 0) 229 if unit and unit_mapping: 230 unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name)) 231 232 start_date = seq_get(args, 1) 233 if start_date and start_date.is_number: 234 # Numeric types are valid DATETIME values 235 if start_date.is_int: 236 adds = DEFAULT_START_DATE + datetime.timedelta(days=start_date.to_py()) 237 start_date = exp.Literal.string(adds.strftime("%F")) 238 else: 239 # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs. 240 # This is not a problem when generating T-SQL code, it is when transpiling to other dialects. 241 return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit) 242 243 return exp_class( 244 this=exp.TimeStrToTime(this=seq_get(args, 2)), 245 expression=exp.TimeStrToTime(this=start_date), 246 unit=unit, 247 ) 248 249 return _builder 250 251 252def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression: 253 """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.""" 254 alias = expression.args.get("alias") 255 256 if ( 257 isinstance(expression, (exp.CTE, exp.Subquery)) 258 and isinstance(alias, exp.TableAlias) 259 and not alias.columns 260 ): 261 from sqlglot.optimizer.qualify_columns import qualify_outputs 262 263 # We keep track of the unaliased column projection indexes instead of the expressions 264 # themselves, because the latter are going to be replaced by new nodes when the aliases 265 # are added and hence we won't be able to reach these newly added Alias parents 266 query = expression.this 267 unaliased_column_indexes = ( 268 i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias 269 ) 270 271 qualify_outputs(query) 272 273 # 
Preserve the quoting information of columns for newly added Alias nodes 274 query_selects = query.selects 275 for select_index in unaliased_column_indexes: 276 alias = query_selects[select_index] 277 column = alias.this 278 if isinstance(column.this, exp.Identifier): 279 alias.args["alias"].set("quoted", column.this.quoted) 280 281 return expression 282 283 284# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax 285def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts: 286 return exp.TimestampFromParts( 287 year=seq_get(args, 0), 288 month=seq_get(args, 1), 289 day=seq_get(args, 2), 290 hour=seq_get(args, 3), 291 min=seq_get(args, 4), 292 sec=seq_get(args, 5), 293 milli=seq_get(args, 6), 294 ) 295 296 297# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax 298def _build_timefromparts(args: t.List) -> exp.TimeFromParts: 299 return exp.TimeFromParts( 300 hour=seq_get(args, 0), 301 min=seq_get(args, 1), 302 sec=seq_get(args, 2), 303 fractions=seq_get(args, 3), 304 precision=seq_get(args, 4), 305 ) 306 307 308def _build_with_arg_as_text( 309 klass: t.Type[exp.Expression], 310) -> t.Callable[[t.List[exp.Expression]], exp.Expression]: 311 def _parse(args: t.List[exp.Expression]) -> exp.Expression: 312 this = seq_get(args, 0) 313 314 if this and not this.is_string: 315 this = exp.cast(this, exp.DataType.Type.TEXT) 316 317 expression = seq_get(args, 1) 318 kwargs = {"this": this} 319 320 if expression: 321 kwargs["expression"] = expression 322 323 return klass(**kwargs) 324 325 return _parse 326 327 328# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16 329def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous: 330 # PARSENAME(...) 
will be stored into exp.SplitPart if: 331 # - All args are literals 332 # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL) 333 if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args): 334 this = args[0] 335 part_index = args[1] 336 split_count = len(this.name.split(".")) 337 if split_count <= 4: 338 return exp.SplitPart( 339 this=this, 340 delimiter=exp.Literal.string("."), 341 part_index=exp.Literal.number(split_count + 1 - part_index.to_py()), 342 ) 343 344 return exp.Anonymous(this="PARSENAME", expressions=args) 345 346 347def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract: 348 if len(args) == 1: 349 # The default value for path is '$'. As a result, if you don't provide a 350 # value for path, JSON_QUERY returns the input expression. 351 args.append(exp.Literal.string("$")) 352 353 return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect) 354 355 356def _json_extract_sql( 357 self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar 358) -> str: 359 json_query = self.func("JSON_QUERY", expression.this, expression.expression) 360 json_value = self.func("JSON_VALUE", expression.this, expression.expression) 361 return self.func("ISNULL", json_query, json_value) 362 363 364def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime): 365 sql = timestrtotime_sql(self, expression) 366 if expression.args.get("zone"): 367 # If there is a timezone, produce an expression like: 368 # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC' 369 # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information 370 return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC"))) 371 return sql 372 373 374def _build_datetrunc(args: t.List) -> exp.TimestampTrunc: 375 unit = seq_get(args, 0) 376 this = seq_get(args, 1) 377 378 if this and this.is_string: 379 this = 
exp.cast(this, exp.DataType.Type.DATETIME2) 380 381 return exp.TimestampTrunc(this=this, unit=unit) 382 383 384class TSQL(Dialect): 385 SUPPORTS_SEMI_ANTI_JOIN = False 386 LOG_BASE_FIRST = False 387 TYPED_DIVISION = True 388 CONCAT_COALESCE = True 389 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 390 391 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 392 393 TIME_MAPPING = { 394 "year": "%Y", 395 "dayofyear": "%j", 396 "day": "%d", 397 "dy": "%d", 398 "y": "%Y", 399 "week": "%W", 400 "ww": "%W", 401 "wk": "%W", 402 "hour": "%h", 403 "hh": "%I", 404 "minute": "%M", 405 "mi": "%M", 406 "n": "%M", 407 "second": "%S", 408 "ss": "%S", 409 "s": "%-S", 410 "millisecond": "%f", 411 "ms": "%f", 412 "weekday": "%w", 413 "dw": "%w", 414 "month": "%m", 415 "mm": "%M", 416 "m": "%-M", 417 "Y": "%Y", 418 "YYYY": "%Y", 419 "YY": "%y", 420 "MMMM": "%B", 421 "MMM": "%b", 422 "MM": "%m", 423 "M": "%-m", 424 "dddd": "%A", 425 "dd": "%d", 426 "d": "%-d", 427 "HH": "%H", 428 "H": "%-H", 429 "h": "%-I", 430 "ffffff": "%f", 431 "yyyy": "%Y", 432 "yy": "%y", 433 } 434 435 CONVERT_FORMAT_MAPPING = { 436 "0": "%b %d %Y %-I:%M%p", 437 "1": "%m/%d/%y", 438 "2": "%y.%m.%d", 439 "3": "%d/%m/%y", 440 "4": "%d.%m.%y", 441 "5": "%d-%m-%y", 442 "6": "%d %b %y", 443 "7": "%b %d, %y", 444 "8": "%H:%M:%S", 445 "9": "%b %d %Y %-I:%M:%S:%f%p", 446 "10": "mm-dd-yy", 447 "11": "yy/mm/dd", 448 "12": "yymmdd", 449 "13": "%d %b %Y %H:%M:ss:%f", 450 "14": "%H:%M:%S:%f", 451 "20": "%Y-%m-%d %H:%M:%S", 452 "21": "%Y-%m-%d %H:%M:%S.%f", 453 "22": "%m/%d/%y %-I:%M:%S %p", 454 "23": "%Y-%m-%d", 455 "24": "%H:%M:%S", 456 "25": "%Y-%m-%d %H:%M:%S.%f", 457 "100": "%b %d %Y %-I:%M%p", 458 "101": "%m/%d/%Y", 459 "102": "%Y.%m.%d", 460 "103": "%d/%m/%Y", 461 "104": "%d.%m.%Y", 462 "105": "%d-%m-%Y", 463 "106": "%d %b %Y", 464 "107": "%b %d, %Y", 465 "108": "%H:%M:%S", 466 "109": "%b %d %Y %-I:%M:%S:%f%p", 467 "110": "%m-%d-%Y", 468 "111": "%Y/%m/%d", 469 "112": "%Y%m%d", 470 "113": "%d %b %Y %H:%M:%S:%f", 471 
"114": "%H:%M:%S:%f", 472 "120": "%Y-%m-%d %H:%M:%S", 473 "121": "%Y-%m-%d %H:%M:%S.%f", 474 } 475 476 FORMAT_TIME_MAPPING = { 477 "y": "%B %Y", 478 "d": "%m/%d/%Y", 479 "H": "%-H", 480 "h": "%-I", 481 "s": "%Y-%m-%d %H:%M:%S", 482 "D": "%A,%B,%Y", 483 "f": "%A,%B,%Y %-I:%M %p", 484 "F": "%A,%B,%Y %-I:%M:%S %p", 485 "g": "%m/%d/%Y %-I:%M %p", 486 "G": "%m/%d/%Y %-I:%M:%S %p", 487 "M": "%B %-d", 488 "m": "%B %-d", 489 "O": "%Y-%m-%dT%H:%M:%S", 490 "u": "%Y-%M-%D %H:%M:%S%z", 491 "U": "%A, %B %D, %Y %H:%M:%S%z", 492 "T": "%-I:%M:%S %p", 493 "t": "%-I:%M", 494 "Y": "%a %Y", 495 } 496 497 class Tokenizer(tokens.Tokenizer): 498 IDENTIFIERS = [("[", "]"), '"'] 499 QUOTES = ["'", '"'] 500 HEX_STRINGS = [("0x", ""), ("0X", "")] 501 VAR_SINGLE_TOKENS = {"@", "$", "#"} 502 503 KEYWORDS = { 504 **tokens.Tokenizer.KEYWORDS, 505 "CLUSTERED INDEX": TokenType.INDEX, 506 "DATETIME2": TokenType.DATETIME2, 507 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 508 "DECLARE": TokenType.DECLARE, 509 "EXEC": TokenType.COMMAND, 510 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 511 "IMAGE": TokenType.IMAGE, 512 "MONEY": TokenType.MONEY, 513 "NONCLUSTERED INDEX": TokenType.INDEX, 514 "NTEXT": TokenType.TEXT, 515 "OPTION": TokenType.OPTION, 516 "OUTPUT": TokenType.RETURNING, 517 "PRINT": TokenType.COMMAND, 518 "PROC": TokenType.PROCEDURE, 519 "REAL": TokenType.FLOAT, 520 "ROWVERSION": TokenType.ROWVERSION, 521 "SMALLDATETIME": TokenType.SMALLDATETIME, 522 "SMALLMONEY": TokenType.SMALLMONEY, 523 "SQL_VARIANT": TokenType.VARIANT, 524 "SYSTEM_USER": TokenType.CURRENT_USER, 525 "TOP": TokenType.TOP, 526 "TIMESTAMP": TokenType.ROWVERSION, 527 "TINYINT": TokenType.UTINYINT, 528 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 529 "UPDATE STATISTICS": TokenType.COMMAND, 530 "XML": TokenType.XML, 531 } 532 KEYWORDS.pop("/*+") 533 534 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 535 536 class Parser(parser.Parser): 537 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 538 LOG_DEFAULTS_TO_LN = True 539 
ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 540 STRING_ALIASES = True 541 NO_PAREN_IF_COMMANDS = False 542 543 QUERY_MODIFIER_PARSERS = { 544 **parser.Parser.QUERY_MODIFIER_PARSERS, 545 TokenType.OPTION: lambda self: ("options", self._parse_options()), 546 } 547 548 FUNCTIONS = { 549 **parser.Parser.FUNCTIONS, 550 "CHARINDEX": lambda args: exp.StrPosition( 551 this=seq_get(args, 1), 552 substr=seq_get(args, 0), 553 position=seq_get(args, 2), 554 ), 555 "COUNT": lambda args: exp.Count( 556 this=seq_get(args, 0), expressions=args[1:], big_int=False 557 ), 558 "COUNT_BIG": lambda args: exp.Count( 559 this=seq_get(args, 0), expressions=args[1:], big_int=True 560 ), 561 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 562 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 563 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 564 "DATEPART": _build_formatted_time(exp.TimeToStr), 565 "DATETIMEFROMPARTS": _build_datetimefromparts, 566 "EOMONTH": _build_eomonth, 567 "FORMAT": _build_format, 568 "GETDATE": exp.CurrentTimestamp.from_arg_list, 569 "HASHBYTES": _build_hashbytes, 570 "ISNULL": build_coalesce, 571 "JSON_QUERY": _build_json_query, 572 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 573 "LEN": _build_with_arg_as_text(exp.Length), 574 "LEFT": _build_with_arg_as_text(exp.Left), 575 "RIGHT": _build_with_arg_as_text(exp.Right), 576 "PARSENAME": _build_parsename, 577 "REPLICATE": exp.Repeat.from_arg_list, 578 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 579 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 580 "SUSER_NAME": exp.CurrentUser.from_arg_list, 581 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 582 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 583 "TIMEFROMPARTS": _build_timefromparts, 584 "DATETRUNC": _build_datetrunc, 585 } 586 587 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 588 589 PROCEDURE_OPTIONS 
= dict.fromkeys( 590 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 591 ) 592 593 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 594 TokenType.TABLE, 595 *parser.Parser.TYPE_TOKENS, 596 } 597 598 STATEMENT_PARSERS = { 599 **parser.Parser.STATEMENT_PARSERS, 600 TokenType.DECLARE: lambda self: self._parse_declare(), 601 } 602 603 RANGE_PARSERS = { 604 **parser.Parser.RANGE_PARSERS, 605 TokenType.DCOLON: lambda self, this: self.expression( 606 exp.ScopeResolution, 607 this=this, 608 expression=self._parse_function() or self._parse_var(any_token=True), 609 ), 610 } 611 612 NO_PAREN_FUNCTION_PARSERS = { 613 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 614 "NEXT": lambda self: self._parse_next_value_for(), 615 } 616 617 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 618 COLUMN_OPERATORS = { 619 **parser.Parser.COLUMN_OPERATORS, 620 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 621 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 622 else self.expression(exp.ScopeResolution, this=this, expression=to), 623 } 624 625 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 626 # We want to use _parse_types() if the first token after :: is a known type, 627 # otherwise we could parse something like x::varchar(max) into a function 628 if self._match_set(self.TYPE_TOKENS, advance=False): 629 return self._parse_types() 630 631 return self._parse_function() or self._parse_types() 632 633 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 634 if not self._match(TokenType.OPTION): 635 return None 636 637 def _parse_option() -> t.Optional[exp.Expression]: 638 option = self._parse_var_from_options(OPTIONS) 639 if not option: 640 return None 641 642 self._match(TokenType.EQ) 643 return self.expression( 644 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 645 ) 646 647 return 
self._parse_wrapped_csv(_parse_option) 648 649 def _parse_projections(self) -> t.List[exp.Expression]: 650 """ 651 T-SQL supports the syntax alias = expression in the SELECT's projection list, 652 so we transform all parsed Selects to convert their EQ projections into Aliases. 653 654 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 655 """ 656 return [ 657 ( 658 exp.alias_(projection.expression, projection.this.this, copy=False) 659 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 660 else projection 661 ) 662 for projection in super()._parse_projections() 663 ] 664 665 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 666 """Applies to SQL Server and Azure SQL Database 667 COMMIT [ { TRAN | TRANSACTION } 668 [ transaction_name | @tran_name_variable ] ] 669 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 670 671 ROLLBACK { TRAN | TRANSACTION } 672 [ transaction_name | @tran_name_variable 673 | savepoint_name | @savepoint_variable ] 674 """ 675 rollback = self._prev.token_type == TokenType.ROLLBACK 676 677 self._match_texts(("TRAN", "TRANSACTION")) 678 this = self._parse_id_var() 679 680 if rollback: 681 return self.expression(exp.Rollback, this=this) 682 683 durability = None 684 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 685 self._match_text_seq("DELAYED_DURABILITY") 686 self._match(TokenType.EQ) 687 688 if self._match_text_seq("OFF"): 689 durability = False 690 else: 691 self._match(TokenType.ON) 692 durability = True 693 694 self._match_r_paren() 695 696 return self.expression(exp.Commit, this=this, durability=durability) 697 698 def _parse_transaction(self) -> exp.Transaction | exp.Command: 699 """Applies to SQL Server and Azure SQL Database 700 BEGIN { TRAN | TRANSACTION } 701 [ { transaction_name | @tran_name_variable } 702 [ WITH MARK [ 'description' ] ] 703 ] 704 """ 705 if self._match_texts(("TRAN", "TRANSACTION")): 706 transaction = 
self.expression(exp.Transaction, this=self._parse_id_var()) 707 if self._match_text_seq("WITH", "MARK"): 708 transaction.set("mark", self._parse_string()) 709 710 return transaction 711 712 return self._parse_as_command(self._prev) 713 714 def _parse_returns(self) -> exp.ReturnsProperty: 715 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 716 returns = super()._parse_returns() 717 returns.set("table", table) 718 return returns 719 720 def _parse_convert( 721 self, strict: bool, safe: t.Optional[bool] = None 722 ) -> t.Optional[exp.Expression]: 723 this = self._parse_types() 724 self._match(TokenType.COMMA) 725 args = [this, *self._parse_csv(self._parse_assignment)] 726 convert = exp.Convert.from_arg_list(args) 727 convert.set("safe", safe) 728 convert.set("strict", strict) 729 return convert 730 731 def _parse_user_defined_function( 732 self, kind: t.Optional[TokenType] = None 733 ) -> t.Optional[exp.Expression]: 734 this = super()._parse_user_defined_function(kind=kind) 735 736 if ( 737 kind == TokenType.FUNCTION 738 or isinstance(this, exp.UserDefinedFunction) 739 or self._match(TokenType.ALIAS, advance=False) 740 ): 741 return this 742 743 if not self._match(TokenType.WITH, advance=False): 744 expressions = self._parse_csv(self._parse_function_parameter) 745 else: 746 expressions = None 747 748 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 749 750 def _parse_id_var( 751 self, 752 any_token: bool = True, 753 tokens: t.Optional[t.Collection[TokenType]] = None, 754 ) -> t.Optional[exp.Expression]: 755 is_temporary = self._match(TokenType.HASH) 756 is_global = is_temporary and self._match(TokenType.HASH) 757 758 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 759 if this: 760 if is_global: 761 this.set("global", True) 762 elif is_temporary: 763 this.set("temporary", True) 764 765 return this 766 767 def _parse_create(self) -> exp.Create | exp.Command: 768 create = 
super()._parse_create() 769 770 if isinstance(create, exp.Create): 771 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 772 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 773 if not create.args.get("properties"): 774 create.set("properties", exp.Properties(expressions=[])) 775 776 create.args["properties"].append("expressions", exp.TemporaryProperty()) 777 778 return create 779 780 def _parse_if(self) -> t.Optional[exp.Expression]: 781 index = self._index 782 783 if self._match_text_seq("OBJECT_ID"): 784 self._parse_wrapped_csv(self._parse_string) 785 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 786 return self._parse_drop(exists=True) 787 self._retreat(index) 788 789 return super()._parse_if() 790 791 def _parse_unique(self) -> exp.UniqueColumnConstraint: 792 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 793 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 794 else: 795 this = self._parse_schema(self._parse_id_var(any_token=False)) 796 797 return self.expression(exp.UniqueColumnConstraint, this=this) 798 799 def _parse_partition(self) -> t.Optional[exp.Partition]: 800 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 801 return None 802 803 def parse_range(): 804 low = self._parse_bitwise() 805 high = self._parse_bitwise() if self._match_text_seq("TO") else None 806 807 return ( 808 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 809 ) 810 811 partition = self.expression( 812 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 813 ) 814 815 self._match_r_paren() 816 817 return partition 818 819 def _parse_declare(self) -> exp.Declare | exp.Command: 820 index = self._index 821 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 822 823 if not expressions or self._curr: 824 self._retreat(index) 825 return self._parse_as_command(self._prev) 826 827 return 
self.expression(exp.Declare, expressions=expressions) 828 829 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 830 var = self._parse_id_var() 831 if not var: 832 return None 833 834 value = None 835 self._match(TokenType.ALIAS) 836 if self._match(TokenType.TABLE): 837 data_type = self._parse_schema() 838 else: 839 data_type = self._parse_types() 840 if self._match(TokenType.EQ): 841 value = self._parse_bitwise() 842 843 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 844 845 class Generator(generator.Generator): 846 LIMIT_IS_TOP = True 847 QUERY_HINTS = False 848 RETURNING_END = False 849 NVL2_SUPPORTED = False 850 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 851 LIMIT_FETCH = "FETCH" 852 COMPUTED_COLUMN_WITH_TYPE = False 853 CTE_RECURSIVE_KEYWORD_REQUIRED = False 854 ENSURE_BOOLS = True 855 NULL_ORDERING_SUPPORTED = None 856 SUPPORTS_SINGLE_ARG_CONCAT = False 857 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 858 SUPPORTS_SELECT_INTO = True 859 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 860 SUPPORTS_TO_NUMBER = False 861 SET_OP_MODIFIERS = False 862 COPY_PARAMS_EQ_REQUIRED = True 863 PARSE_JSON_NAME = None 864 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 865 866 EXPRESSIONS_WITHOUT_NESTED_CTES = { 867 exp.Create, 868 exp.Delete, 869 exp.Insert, 870 exp.Intersect, 871 exp.Except, 872 exp.Merge, 873 exp.Select, 874 exp.Subquery, 875 exp.Union, 876 exp.Update, 877 } 878 879 SUPPORTED_JSON_PATH_PARTS = { 880 exp.JSONPathKey, 881 exp.JSONPathRoot, 882 exp.JSONPathSubscript, 883 } 884 885 TYPE_MAPPING = { 886 **generator.Generator.TYPE_MAPPING, 887 exp.DataType.Type.BOOLEAN: "BIT", 888 exp.DataType.Type.DATETIME2: "DATETIME2", 889 exp.DataType.Type.DECIMAL: "NUMERIC", 890 exp.DataType.Type.DOUBLE: "FLOAT", 891 exp.DataType.Type.INT: "INTEGER", 892 exp.DataType.Type.ROWVERSION: "ROWVERSION", 893 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 894 exp.DataType.Type.TIMESTAMP: "DATETIME2", 895 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 896 
exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 897 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 898 exp.DataType.Type.UTINYINT: "TINYINT", 899 exp.DataType.Type.VARIANT: "SQL_VARIANT", 900 } 901 902 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 903 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 904 905 TRANSFORMS = { 906 **generator.Generator.TRANSFORMS, 907 exp.AnyValue: any_value_to_max_sql, 908 exp.ArrayToString: rename_func("STRING_AGG"), 909 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 910 exp.Chr: rename_func("CHAR"), 911 exp.DateAdd: date_delta_sql("DATEADD"), 912 exp.DateDiff: date_delta_sql("DATEDIFF"), 913 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 914 exp.CurrentDate: rename_func("GETDATE"), 915 exp.CurrentTimestamp: rename_func("GETDATE"), 916 exp.DateStrToDate: datestrtodate_sql, 917 exp.Extract: rename_func("DATEPART"), 918 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 919 exp.GroupConcat: _string_agg_sql, 920 exp.If: rename_func("IIF"), 921 exp.JSONExtract: _json_extract_sql, 922 exp.JSONExtractScalar: _json_extract_sql, 923 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 924 exp.Ln: rename_func("LOG"), 925 exp.Max: max_or_greatest, 926 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 927 exp.Min: min_or_least, 928 exp.NumberToStr: _format_sql, 929 exp.Repeat: rename_func("REPLICATE"), 930 exp.Select: transforms.preprocess( 931 [ 932 transforms.eliminate_distinct_on, 933 transforms.eliminate_semi_and_anti_joins, 934 transforms.eliminate_qualify, 935 transforms.unnest_generate_date_array_using_recursive_cte, 936 ] 937 ), 938 exp.Stddev: rename_func("STDEV"), 939 exp.StrPosition: lambda self, e: strposition_sql( 940 self, e, func_name="CHARINDEX", supports_position=True 941 ), 942 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 943 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 
944 exp.SHA2: lambda self, e: self.func( 945 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 946 ), 947 exp.TemporaryProperty: lambda self, e: "", 948 exp.TimeStrToTime: _timestrtotime_sql, 949 exp.TimeToStr: _format_sql, 950 exp.Trim: trim_sql, 951 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 952 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 953 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 954 exp.DateFromParts: rename_func("DATEFROMPARTS"), 955 } 956 957 TRANSFORMS.pop(exp.ReturnsProperty) 958 959 PROPERTIES_LOCATION = { 960 **generator.Generator.PROPERTIES_LOCATION, 961 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 962 } 963 964 def scope_resolution(self, rhs: str, scope_name: str) -> str: 965 return f"{scope_name}::{rhs}" 966 967 def select_sql(self, expression: exp.Select) -> str: 968 if expression.args.get("offset"): 969 if not expression.args.get("order"): 970 # ORDER BY is required in order to use OFFSET in a query, so we use 971 # a noop order by, since we don't really care about the order. 
                # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            limit = expression.args.get("limit")
            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                # we replace here because otherwise TOP would be generated in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            """Render CONVERT(type, expr[, style]), or TRY_CONVERT when flagged safe."""
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            """Render one OPTION(...) hint; some options require '=' before their value."""
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            """T-SQL spells lateral joins as CROSS APPLY / OUTER APPLY."""
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"

        def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
            """Transpile SPLIT_PART to PARSENAME ('.' delimiter, literal args, <= 4 parts only).

            PARSENAME numbers parts right-to-left, hence `split_count + 1 - part_index`
            inverts SPLIT_PART's 1-based left-to-right index.
            """
            this = expression.this
            split_count = len(this.name.split("."))
            delimiter = expression.args.get("delimiter")
            part_index = expression.args.get("part_index")

            if (
                not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
                or (delimiter and delimiter.name != ".")
                or not part_index
                or split_count > 4
            ):
                self.unsupported(
                    "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
                )
                return ""

            return self.func(
                "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
            )

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            """Render TIMEFROMPARTS, defaulting fractions/precision to 0; nanos are dropped."""
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)

        def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
            """Render DATETIMEFROMPARTS, defaulting milli to 0; zone and nanos are dropped."""
            zone = expression.args.get("zone")
            if zone is not None:
                zone.pop()
                self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

            if expression.args.get("milli") is None:
                expression.set("milli", exp.Literal.number(0))

            return rename_func("DATETIMEFROMPARTS")(self, expression)

        def setitem_sql(self, expression: exp.SetItem) -> str:
            this = expression.this
            if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
                # T-SQL does not use '=' in SET command, except when the LHS is a variable.
                return f"{self.sql(this.left)} {self.sql(this.right)}"

            return super().setitem_sql(expression)

        def boolean_sql(self, expression: exp.Boolean) -> str:
            # In BIT-valued contexts (comparisons, IN, projections, VALUES rows) booleans
            # render as 1/0 literals; elsewhere as always-true/always-false predicates.
            if type(expression.parent) in BIT_TYPES or isinstance(
                expression.find_ancestor(exp.Values, exp.Select), exp.Values
            ):
                return "1" if expression.this else "0"

            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            # T-SQL has no IS TRUE/FALSE; compare against the 1/0 BIT literal instead.
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            """Prefix the created object's name with '#' when it carries TemporaryProperty."""
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                # Keep the '#' inside the brackets for quoted names: [name] -> [#name]
                sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            """Generate CREATE, rewriting CTAS to SELECT .. INTO and emulating IF NOT EXISTS."""
            kind = expression.kind
            exists = expression.args.pop("exists", None)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            if kind == "VIEW":
                expression.this.set("catalog", None)
                with_ = expression.args.get("with")
                if ctas_expression and with_:
                    # We've already preprocessed the Create expression to bubble up any nested CTEs,
                    # but CREATE VIEW actually requires the WITH clause to come after it so we need
                    # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                    ctas_expression.set("with", with_.pop())

            sql = super().create_sql(expression)

            table = expression.find(exp.Table)

            # Convert CTAS statement to SELECT .. INTO ..
            if kind == "TABLE" and ctas_expression:
                if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                    ctas_expression = ctas_expression.subquery()

                select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
                select_into.set("into", exp.Into(this=table))

                if like_property:
                    # LIKE only copies the schema, so select zero rows
                    select_into.limit(0, copy=False)

                sql = self.sql(select_into)

            if exists:
                # No IF NOT EXISTS in T-SQL: guard an EXEC('<create sql>') with a catalog lookup
                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
                sql_with_ctes = self.prepend_ctes(expression, sql)
                sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
                if kind == "SCHEMA":
                    return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})"""
                elif kind == "TABLE":
                    assert table
                    where = exp.and_(
                        exp.column("table_name").eq(table.name),
                        exp.column("table_schema").eq(table.db) if table.db else None,
                        exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
                    )
                    return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})"""
                elif kind == "INDEX":
                    index = self.sql(exp.Literal.string(expression.this.text("this")))
                    return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
            elif expression.args.get("replace"):
                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

            return self.prepend_ctes(expression, sql)

        def count_sql(self, expression: exp.Count) -> str:
            # big_int flag distinguishes COUNT_BIG from plain COUNT
            func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
            return rename_func(func_name)(self, expression)

        def offset_sql(self, expression: exp.Offset) -> str:
            # T-SQL requires the ROWS keyword after OFFSET <n>
            return f"{super().offset_sql(expression)} ROWS"

        def version_sql(self, expression: exp.Version) -> str:
            """Render temporal FOR SYSTEM_TIME clauses (AS OF / FROM..TO / BETWEEN..AND)."""
            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
            this = f"FOR {name}"
            expr = expression.expression
            kind = expression.text("kind")
            if kind in ("FROM", "BETWEEN"):
                # Two-bound forms use different separators: FROM a TO b, BETWEEN a AND b
                args = expr.expressions
                sep = "TO" if kind == "FROM" else "AND"
                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
            else:
                expr_sql = self.sql(expr)

            expr_sql = f" {expr_sql}" if expr_sql else ""
            return f"{this} {kind}{expr_sql}"

        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
            table = expression.args.get("table")
            table = f"{table} " if table else ""
            return f"RETURNS {table}{self.sql(expression, 'this')}"

        def returning_sql(self, expression: exp.Returning) -> str:
            # RETURNING is spelled OUTPUT [... INTO target] in T-SQL
            into = self.sql(expression, "into")
            into = self.seg(f"INTO {into}") if into else ""
            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

        def transaction_sql(self, expression: exp.Transaction) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            mark = self.sql(expression, "mark")
            mark = f" WITH MARK {mark}" if mark else ""
            return f"BEGIN TRANSACTION{this}{mark}"

        def commit_sql(self, expression: exp.Commit) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            # durability is a tri-state: None = unspecified, True/False = ON/OFF
            durability = expression.args.get("durability")
            durability = (
                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
                if durability is not None
                else ""
            )
            return f"COMMIT TRANSACTION{this}{durability}"

        def rollback_sql(self, expression: exp.Rollback) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            return f"ROLLBACK TRANSACTION{this}"

        def identifier_sql(self, expression: exp.Identifier) -> str:
            identifier = super().identifier_sql(expression)

            # '#' marks local temp objects, '##' global ones
            if expression.args.get("global"):
                identifier = f"##{identifier}"
            elif expression.args.get("temporary"):
                identifier = f"#{identifier}"

            return identifier

        def constraint_sql(self, expression: exp.Constraint) -> str:
            this = self.sql(expression, "this")
            expressions = self.expressions(expression, flat=True, sep=" ")
            return f"CONSTRAINT {this} {expressions}"

        def length_sql(self, expression: exp.Length) -> str:
            return self._uncast_text(expression, "LEN")

        def right_sql(self, expression: exp.Right) -> str:
            return self._uncast_text(expression, "RIGHT")

        def left_sql(self, expression: exp.Left) -> str:
            return self._uncast_text(expression, "LEFT")

        def _uncast_text(self, expression: exp.Expression, name: str) -> str:
            # Drop a redundant CAST(x AS TEXT) wrapper around the first argument
            # (the parser adds one via _build_with_arg_as_text for LEN/LEFT/RIGHT).
            this = expression.this
            if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
                this_sql = self.sql(this, "this")
            else:
                this_sql = self.sql(this)
            expression_sql = self.sql(expression, "expression")
            return self.func(name, this_sql, expression_sql if expression_sql else None)

        def partition_sql(self, expression: exp.Partition) -> str:
            return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

        def alter_sql(self, expression: exp.Alter) -> str:
            # Renames go through the sp_rename system procedure, not ALTER ... RENAME
            action = seq_get(expression.args.get("actions") or [], 0)
            if isinstance(action, exp.AlterRename):
                return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
            return super().alter_sql(expression)

        def drop_sql(self, expression: exp.Drop) -> str:
            if expression.args["kind"] == "VIEW":
                # DROP VIEW does not accept a catalog qualifier
                expression.this.set("catalog", None)
            return super().drop_sql(expression)

        def declare_sql(self, expression: exp.Declare) -> str:
            return f"DECLARE {self.expressions(expression, flat=True)}"

        def declareitem_sql(self, expression: exp.DeclareItem) -> str:
            """Render one DECLARE item: @var AS <type> [= default] (TABLE for Schema kinds)."""
            variable = self.sql(expression, "this")
            default = self.sql(expression, "default")
            default = f" = {default}" if default else ""

            kind = self.sql(expression, "kind")
            if isinstance(expression.args.get("kind"), exp.Schema):
                kind = f"TABLE {kind}"

            return f"{variable} AS {kind}{default}"

        def options_modifier(self, expression: exp.Expression) -> str:
            options = self.expressions(expression, key="options")
            return f" OPTION{self.wrap(options)}" if options else ""

        def dpipe_sql(self, expression: exp.DPipe) -> str:
            # T-SQL concatenates strings with '+': fold a || b || c into nested Adds
            return self.sql(
                reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
            )

        def isascii_sql(self, expression: exp.IsAscii) -> str:
            # 0x255b5e002d7f5d25 decodes to the pattern '%[^\x00-\x7F]%'; PATINDEX under a
            # binary collation finds any non-ASCII character, so "= 0" means all-ASCII.
            return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.

    Only fires for a CTE/Subquery that has a table alias without an explicit
    column list; other expressions are returned unchanged.
    """
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents.
        # NOTE: per PEP 289 the generator's outermost iterable, enumerate(query.selects),
        # is evaluated eagerly at creation time, so the PRE-qualification projections are
        # captured even though the filtering below only runs after qualify_outputs().
        query = expression.this
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression
Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
class TSQL(Dialect):
    # Dialect-level feature flags; see sqlglot.dialects.dialect.Dialect for their semantics.
    SUPPORTS_SEMI_ANTI_JOIN = False
    LOG_BASE_FIRST = False
    TYPED_DIVISION = True
    CONCAT_COALESCE = True
    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE

    TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"

    # T-SQL date/time format tokens and DATEPART-style unit names -> strftime-like directives.
    # NOTE(review): "mm"/"m" map to minute directives here although they are also month
    # abbreviations in T-SQL — confirm this matches how callers disambiguate the two.
    TIME_MAPPING = {
        "year": "%Y",
        "dayofyear": "%j",
        "day": "%d",
        "dy": "%d",
        "y": "%Y",
        "week": "%W",
        "ww": "%W",
        "wk": "%W",
        "hour": "%h",
        "hh": "%I",
        "minute": "%M",
        "mi": "%M",
        "n": "%M",
        "second": "%S",
        "ss": "%S",
        "s": "%-S",
        "millisecond": "%f",
        "ms": "%f",
        "weekday": "%w",
        "dw": "%w",
        "month": "%m",
        "mm": "%M",
        "m": "%-M",
        "Y": "%Y",
        "YYYY": "%Y",
        "YY": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dddd": "%A",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "h": "%-I",
        "ffffff": "%f",
        "yyyy": "%Y",
        "yy": "%y",
    }

    # CONVERT(..., style) numeric codes -> time format strings.
    # See the T-SQL CAST/CONVERT documentation for the meaning of each style number.
    CONVERT_FORMAT_MAPPING = {
        "0": "%b %d %Y %-I:%M%p",
        "1": "%m/%d/%y",
        "2": "%y.%m.%d",
        "3": "%d/%m/%y",
        "4": "%d.%m.%y",
        "5": "%d-%m-%y",
        "6": "%d %b %y",
        "7": "%b %d, %y",
        "8": "%H:%M:%S",
        "9": "%b %d %Y %-I:%M:%S:%f%p",
        "10": "mm-dd-yy",
        "11": "yy/mm/dd",
        "12": "yymmdd",
        "13": "%d %b %Y %H:%M:ss:%f",
        "14": "%H:%M:%S:%f",
        "20": "%Y-%m-%d %H:%M:%S",
        "21": "%Y-%m-%d %H:%M:%S.%f",
        "22": "%m/%d/%y %-I:%M:%S %p",
        "23": "%Y-%m-%d",
        "24": "%H:%M:%S",
        "25": "%Y-%m-%d %H:%M:%S.%f",
        "100": "%b %d %Y %-I:%M%p",
        "101": "%m/%d/%Y",
        "102": "%Y.%m.%d",
        "103": "%d/%m/%Y",
        "104": "%d.%m.%Y",
        "105": "%d-%m-%Y",
        "106": "%d %b %Y",
        "107": "%b %d, %Y",
        "108": "%H:%M:%S",
        "109": "%b %d %Y %-I:%M:%S:%f%p",
        "110": "%m-%d-%Y",
        "111": "%Y/%m/%d",
        "112": "%Y%m%d",
        "113": "%d %b %Y %H:%M:%S:%f",
        "114": "%H:%M:%S:%f",
        "120": "%Y-%m-%d %H:%M:%S",
        "121": "%Y-%m-%d %H:%M:%S.%f",
    }
    # .NET-style standard format specifiers (as accepted by FORMAT()) -> strftime-like strings
    FORMAT_TIME_MAPPING = {
        "y": "%B %Y",
        "d": "%m/%d/%Y",
        "H": "%-H",
        "h": "%-I",
        "s": "%Y-%m-%d %H:%M:%S",
        "D": "%A,%B,%Y",
        "f": "%A,%B,%Y %-I:%M %p",
        "F": "%A,%B,%Y %-I:%M:%S %p",
        "g": "%m/%d/%Y %-I:%M %p",
        "G": "%m/%d/%Y %-I:%M:%S %p",
        "M": "%B %-d",
        "m": "%B %-d",
        "O": "%Y-%m-%dT%H:%M:%S",
        "u": "%Y-%M-%D %H:%M:%S%z",
        "U": "%A, %B %D, %Y %H:%M:%S%z",
        "T": "%-I:%M:%S %p",
        "t": "%-I:%M",
        "Y": "%a %Y",
    }

    class Tokenizer(tokens.Tokenizer):
        # T-SQL quotes identifiers with [brackets] or double quotes
        IDENTIFIERS = [("[", "]"), '"']
        QUOTES = ["'", '"']
        # Hex literals are written 0xDEADBEEF with no closing delimiter
        HEX_STRINGS = [("0x", ""), ("0X", "")]
        # '@' (variables), '$' and '#' (temp objects) may start identifiers/vars
        VAR_SINGLE_TOKENS = {"@", "$", "#"}

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "CLUSTERED INDEX": TokenType.INDEX,
            "DATETIME2": TokenType.DATETIME2,
            "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
            "DECLARE": TokenType.DECLARE,
            "EXEC": TokenType.COMMAND,
            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
            "IMAGE": TokenType.IMAGE,
            "MONEY": TokenType.MONEY,
            "NONCLUSTERED INDEX": TokenType.INDEX,
            "NTEXT": TokenType.TEXT,
            "OPTION": TokenType.OPTION,
            "OUTPUT": TokenType.RETURNING,
            "PRINT": TokenType.COMMAND,
            "PROC": TokenType.PROCEDURE,
            "REAL": TokenType.FLOAT,
            "ROWVERSION": TokenType.ROWVERSION,
            "SMALLDATETIME": TokenType.SMALLDATETIME,
            "SMALLMONEY": TokenType.SMALLMONEY,
            "SQL_VARIANT": TokenType.VARIANT,
            "SYSTEM_USER": TokenType.CURRENT_USER,
            "TOP": TokenType.TOP,
            "TIMESTAMP": TokenType.ROWVERSION,
            "TINYINT": TokenType.UTINYINT,
            "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER,
            "UPDATE STATISTICS": TokenType.COMMAND,
            "XML": TokenType.XML,
        }
        # '/*+' hint comments are not treated specially in T-SQL
        KEYWORDS.pop("/*+")

        COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}

    class Parser(parser.Parser):
        # Parser behavior switches; see sqlglot.parser.Parser for their semantics.
        SET_REQUIRES_ASSIGNMENT_DELIMITER = False
        LOG_DEFAULTS_TO_LN = True
        ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
        STRING_ALIASES = True
        NO_PAREN_IF_COMMANDS =
False

        QUERY_MODIFIER_PARSERS = {
            **parser.Parser.QUERY_MODIFIER_PARSERS,
            TokenType.OPTION: lambda self: ("options", self._parse_options()),
        }

        # T-SQL function name -> builder producing the canonical sqlglot expression
        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            # CHARINDEX(substr, string[, start]) has the haystack as its SECOND argument
            "CHARINDEX": lambda args: exp.StrPosition(
                this=seq_get(args, 1),
                substr=seq_get(args, 0),
                position=seq_get(args, 2),
            ),
            "COUNT": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=False
            ),
            "COUNT_BIG": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=True
            ),
            "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
            "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
            "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
            "DATEPART": _build_formatted_time(exp.TimeToStr),
            "DATETIMEFROMPARTS": _build_datetimefromparts,
            "EOMONTH": _build_eomonth,
            "FORMAT": _build_format,
            "GETDATE": exp.CurrentTimestamp.from_arg_list,
            "HASHBYTES": _build_hashbytes,
            "ISNULL": build_coalesce,
            "JSON_QUERY": _build_json_query,
            "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
            "LEN": _build_with_arg_as_text(exp.Length),
            "LEFT": _build_with_arg_as_text(exp.Left),
            "RIGHT": _build_with_arg_as_text(exp.Right),
            "PARSENAME": _build_parsename,
            "REPLICATE": exp.Repeat.from_arg_list,
            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
            "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
            "SUSER_NAME": exp.CurrentUser.from_arg_list,
            "SUSER_SNAME": exp.CurrentUser.from_arg_list,
            "SYSTEM_USER": exp.CurrentUser.from_arg_list,
            "TIMEFROMPARTS": _build_timefromparts,
            "DATETRUNC": _build_datetrunc,
        }

        JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

        PROCEDURE_OPTIONS = dict.fromkeys(
            ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"),
            tuple(),
        )

        # Tokens that may name the table in RETURNS <table_var> TABLE (...)
        RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
            TokenType.TABLE,
            *parser.Parser.TYPE_TOKENS,
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,
            TokenType.DECLARE: lambda self: self._parse_declare(),
        }

        RANGE_PARSERS = {
            **parser.Parser.RANGE_PARSERS,
            TokenType.DCOLON: lambda self, this: self.expression(
                exp.ScopeResolution,
                this=this,
                expression=self._parse_function() or self._parse_var(any_token=True),
            ),
        }

        NO_PAREN_FUNCTION_PARSERS = {
            **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
            "NEXT": lambda self: self._parse_next_value_for(),
        }

        # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
        COLUMN_OPERATORS = {
            **parser.Parser.COLUMN_OPERATORS,
            TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
            if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
            else self.expression(exp.ScopeResolution, this=this, expression=to),
        }

        def _parse_dcolon(self) -> t.Optional[exp.Expression]:
            # We want to use _parse_types() if the first token after :: is a known type,
            # otherwise we could parse something like x::varchar(max) into a function
            if self._match_set(self.TYPE_TOKENS, advance=False):
                return self._parse_types()

            return self._parse_function() or self._parse_types()

        def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
            """Parse an OPTION(...) clause into a list of exp.QueryOption nodes."""
            if not self._match(TokenType.OPTION):
                return None

            def _parse_option() -> t.Optional[exp.Expression]:
                option = self._parse_var_from_options(OPTIONS)
                if not option:
                    return None

                # '=' between option and value is optional at parse time
                self._match(TokenType.EQ)
                return self.expression(
                    exp.QueryOption, this=option, expression=self._parse_primary_or_var()
                )

            return self._parse_wrapped_csv(_parse_option)

        def _parse_projections(self) -> t.List[exp.Expression]:
            """
            T-SQL supports the syntax alias = expression in the SELECT's projection list,
            so we transform all parsed Selects to convert their EQ projections into Aliases.

            See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
            """
            return [
                (
                    exp.alias_(projection.expression, projection.this.this, copy=False)
                    if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                    else projection
                )
                for projection in super()._parse_projections()
            ]

        def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
            """Applies to SQL Server and Azure SQL Database
            COMMIT [ { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable ] ]
                [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

            ROLLBACK { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable
                | savepoint_name | @savepoint_variable ]
            """
            rollback = self._prev.token_type == TokenType.ROLLBACK

            self._match_texts(("TRAN", "TRANSACTION"))
            this = self._parse_id_var()

            if rollback:
                return self.expression(exp.Rollback, this=this)

            durability = None
            if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
                self._match_text_seq("DELAYED_DURABILITY")
                self._match(TokenType.EQ)

                if self._match_text_seq("OFF"):
                    durability = False
                else:
                    self._match(TokenType.ON)
                    durability = True

                self._match_r_paren()

            return self.expression(exp.Commit, this=this, durability=durability)

        def _parse_transaction(self) -> exp.Transaction | exp.Command:
            """Applies to SQL Server and Azure SQL Database
            BEGIN { TRAN | TRANSACTION }
            [ { transaction_name | @tran_name_variable }
            [ WITH MARK [ 'description' ] ]
            ]
            """
            if self._match_texts(("TRAN", "TRANSACTION")):
                transaction = self.expression(exp.Transaction, this=self._parse_id_var())
                if self._match_text_seq("WITH", "MARK"):
                    transaction.set("mark", self._parse_string())

                return transaction

            # Not a transaction statement: fall back to an opaque command
            return self._parse_as_command(self._prev)

        def _parse_returns(self) -> exp.ReturnsProperty:
            # An id var before the RETURNS body names a table variable: RETURNS @t TABLE (...)
            table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
            returns = super()._parse_returns()
            returns.set("table", table)
            return returns

        def _parse_convert(
            self, strict: bool, safe: t.Optional[bool] = None
        ) -> t.Optional[exp.Expression]:
            """Parse CONVERT(type, expr[, style]) into exp.Convert with safe/strict flags."""
            this = self._parse_types()
            self._match(TokenType.COMMA)
            args = [this, *self._parse_csv(self._parse_assignment)]
            convert = exp.Convert.from_arg_list(args)
            convert.set("safe", safe)
            convert.set("strict", strict)
            return convert

        def _parse_user_defined_function(
            self, kind: t.Optional[TokenType] = None
        ) -> t.Optional[exp.Expression]:
            # Procedure parameters may appear without parentheses; collect them here
            this = super()._parse_user_defined_function(kind=kind)

            if (
                kind == TokenType.FUNCTION
                or isinstance(this, exp.UserDefinedFunction)
                or self._match(TokenType.ALIAS, advance=False)
            ):
                return this

            if not self._match(TokenType.WITH, advance=False):
                expressions = self._parse_csv(self._parse_function_parameter)
            else:
                expressions = None

            return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

        def _parse_id_var(
            self,
            any_token: bool = True,
            tokens: t.Optional[t.Collection[TokenType]] = None,
        ) -> t.Optional[exp.Expression]:
            # '#' prefixes a local temp object, '##' a global one; record that on the node
            is_temporary = self._match(TokenType.HASH)
            is_global = is_temporary and self._match(TokenType.HASH)

            this = super()._parse_id_var(any_token=any_token, tokens=tokens)
            if this:
                if is_global:
                    this.set("global", True)
                elif is_temporary:
                    this.set("temporary", True)

            return this

        def _parse_create(self) -> exp.Create | exp.Command:
            create = super()._parse_create()

            if isinstance(create, exp.Create):
                # Propagate a '#'-prefixed table name into an explicit TemporaryProperty
                table = create.this.this if isinstance(create.this, exp.Schema) else create.this
                if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
                    if not create.args.get("properties"):
                        create.set("properties", exp.Properties(expressions=[]))

                    create.args["properties"].append("expressions", exp.TemporaryProperty())

            return create

        def _parse_if(self) -> t.Optional[exp.Expression]:
            """Recognize `IF OBJECT_ID(...) IS NOT NULL DROP ...` as DROP IF EXISTS."""
            index = self._index

            if self._match_text_seq("OBJECT_ID"):
                self._parse_wrapped_csv(self._parse_string)
                if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                    return self._parse_drop(exists=True)
                self._retreat(index)

            return super()._parse_if()

        def _parse_unique(self) -> exp.UniqueColumnConstraint:
            if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
                this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
            else:
                this = self._parse_schema(self._parse_id_var(any_token=False))

            return self.expression(exp.UniqueColumnConstraint, this=this)

        def _parse_partition(self) -> t.Optional[exp.Partition]:
            """Parse WITH (PARTITIONS (...)), supporting `low TO high` ranges."""
            if not self._match_text_seq("WITH", "(", "PARTITIONS"):
                return None

            def parse_range():
                low = self._parse_bitwise()
                high = self._parse_bitwise() if self._match_text_seq("TO") else None

                return (
                    self.expression(exp.PartitionRange, this=low, expression=high) if high else low
                )

            partition = self.expression(
                exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
            )

            self._match_r_paren()

            return partition

        def _parse_declare(self) -> exp.Declare | exp.Command:
            index = self._index
            expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem))

            # Anything we couldn't fully consume falls back to an opaque Command
            if not expressions or self._curr:
                self._retreat(index)
                return self._parse_as_command(self._prev)

            return self.expression(exp.Declare, expressions=expressions)

        def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
            var =
self._parse_id_var()
            if not var:
                return None

            value = None
            self._match(TokenType.ALIAS)
            if self._match(TokenType.TABLE):
                # Table variable: DECLARE @t TABLE (col type, ...)
                data_type = self._parse_schema()
            else:
                data_type = self._parse_types()
                if self._match(TokenType.EQ):
                    value = self._parse_bitwise()

            return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)

    class Generator(generator.Generator):
        # Generation behavior switches; see sqlglot.generator.Generator for their semantics.
        LIMIT_IS_TOP = True
        QUERY_HINTS = False
        RETURNING_END = False
        NVL2_SUPPORTED = False
        ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
        LIMIT_FETCH = "FETCH"
        COMPUTED_COLUMN_WITH_TYPE = False
        CTE_RECURSIVE_KEYWORD_REQUIRED = False
        ENSURE_BOOLS = True
        NULL_ORDERING_SUPPORTED = None
        SUPPORTS_SINGLE_ARG_CONCAT = False
        TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
        SUPPORTS_SELECT_INTO = True
        JSON_PATH_BRACKETED_KEY_SUPPORTED = False
        SUPPORTS_TO_NUMBER = False
        SET_OP_MODIFIERS = False
        COPY_PARAMS_EQ_REQUIRED = True
        PARSE_JSON_NAME = None
        EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False

        # Statements whose nested CTEs must be bubbled up before generation
        EXPRESSIONS_WITHOUT_NESTED_CTES = {
            exp.Create,
            exp.Delete,
            exp.Insert,
            exp.Intersect,
            exp.Except,
            exp.Merge,
            exp.Select,
            exp.Subquery,
            exp.Union,
            exp.Update,
        }

        SUPPORTED_JSON_PATH_PARTS = {
            exp.JSONPathKey,
            exp.JSONPathRoot,
            exp.JSONPathSubscript,
        }

        # sqlglot canonical types -> T-SQL type names
        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.BOOLEAN: "BIT",
            exp.DataType.Type.DATETIME2: "DATETIME2",
            exp.DataType.Type.DECIMAL: "NUMERIC",
            exp.DataType.Type.DOUBLE: "FLOAT",
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.ROWVERSION: "ROWVERSION",
            exp.DataType.Type.TEXT: "VARCHAR(MAX)",
            exp.DataType.Type.TIMESTAMP: "DATETIME2",
            exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
            exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
            exp.DataType.Type.UTINYINT: "TINYINT",
            exp.DataType.Type.VARIANT: "SQL_VARIANT",
        }

        # NCHAR/NVARCHAR pass through unmapped (kept verbatim)
        TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
        TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

        # Expression class -> generation function/transform for this dialect
        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.AnyValue: any_value_to_max_sql,
            exp.ArrayToString: rename_func("STRING_AGG"),
            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
            exp.Chr: rename_func("CHAR"),
            exp.DateAdd: date_delta_sql("DATEADD"),
            exp.DateDiff: date_delta_sql("DATEDIFF"),
            exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
            exp.CurrentDate: rename_func("GETDATE"),
            exp.CurrentTimestamp: rename_func("GETDATE"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.Extract: rename_func("DATEPART"),
            exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
            exp.GroupConcat: _string_agg_sql,
            exp.If: rename_func("IIF"),
            exp.JSONExtract: _json_extract_sql,
            exp.JSONExtractScalar: _json_extract_sql,
            exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
            exp.Ln: rename_func("LOG"),
            exp.Max: max_or_greatest,
            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
            exp.Min: min_or_least,
            exp.NumberToStr: _format_sql,
            exp.Repeat: rename_func("REPLICATE"),
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_distinct_on,
                    transforms.eliminate_semi_and_anti_joins,
                    transforms.eliminate_qualify,
                    transforms.unnest_generate_date_array_using_recursive_cte,
                ]
            ),
            exp.Stddev: rename_func("STDEV"),
            exp.StrPosition: lambda self, e: strposition_sql(
                self, e, func_name="CHARINDEX", supports_position=True
            ),
            exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
            exp.SHA2: lambda self, e: self.func(
                "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
            ),
            exp.TemporaryProperty: lambda self, e: "",
            exp.TimeStrToTime: _timestrtotime_sql,
            exp.TimeToStr: _format_sql,
            exp.Trim: trim_sql,
            exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
            exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
            exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
            exp.DateFromParts: rename_func("DATEFROMPARTS"),
        }

        TRANSFORMS.pop(exp.ReturnsProperty)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def scope_resolution(self, rhs: str, scope_name: str) -> str:
            # '::' is T-SQL's scope-resolution operator
            return f"{scope_name}::{rhs}"

        def select_sql(self, expression: exp.Select) -> str:
            """Generate SELECT, adding a noop ORDER BY for OFFSET and FETCH for OFFSET+LIMIT."""
            if expression.args.get("offset"):
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            limit = expression.args.get("limit")
            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                # we replace here because otherwise TOP would be generated in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            """Render CONVERT(type, expr[, style]), or TRY_CONVERT when flagged safe."""
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            """Render one OPTION(...) hint; some options require '=' before their value."""
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            """T-SQL spells lateral joins as CROSS APPLY / OUTER APPLY."""
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"

        def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
            """Transpile SPLIT_PART to PARSENAME ('.' delimiter, literal args, <= 4 parts only).

            PARSENAME numbers parts right-to-left, hence `split_count + 1 - part_index`
            inverts SPLIT_PART's 1-based left-to-right index.
            """
            this = expression.this
            split_count = len(this.name.split("."))
            delimiter = expression.args.get("delimiter")
            part_index = expression.args.get("part_index")

            if (
                not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
                or (delimiter and delimiter.name != ".")
                or not part_index
                or split_count > 4
            ):
                self.unsupported(
                    "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
                )
                return ""

            return self.func(
                "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
            )

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            """Render TIMEFROMPARTS, defaulting fractions/precision to 0; nanos are dropped."""
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)

        def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
            """Render DATETIMEFROMPARTS, defaulting milli to 0; zone and nanos are dropped."""
            zone = expression.args.get("zone")
            if zone is not None:
                zone.pop()
                self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

            if expression.args.get("milli") is None:
                expression.set("milli", exp.Literal.number(0))

            return rename_func("DATETIMEFROMPARTS")(self, expression)

        def setitem_sql(self, expression: exp.SetItem) -> str:
            this = expression.this
            if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
                # T-SQL does not use '=' in SET command, except when the LHS is a variable.
                return f"{self.sql(this.left)} {self.sql(this.right)}"

            return super().setitem_sql(expression)

        def boolean_sql(self, expression: exp.Boolean) -> str:
            # In BIT-valued contexts (comparisons, IN, projections, VALUES rows) booleans
            # render as 1/0 literals; elsewhere as always-true/always-false predicates.
            if type(expression.parent) in BIT_TYPES or isinstance(
                expression.find_ancestor(exp.Values, exp.Select), exp.Values
            ):
                return "1" if expression.this else "0"

            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            # T-SQL has no IS TRUE/FALSE; compare against the 1/0 BIT literal instead.
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            """Prefix the created object's name with '#' when it carries TemporaryProperty."""
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                # Keep the '#' inside the brackets for quoted names: [name] -> [#name]
                sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            """Generate CREATE, rewriting CTAS to SELECT .. INTO and emulating IF NOT EXISTS."""
            kind = expression.kind
            exists = expression.args.pop("exists", None)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            if kind == "VIEW":
                expression.this.set("catalog", None)
                with_ = expression.args.get("with")
                if
ctas_expression and with_: 1106 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1107 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1108 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1109 ctas_expression.set("with", with_.pop()) 1110 1111 sql = super().create_sql(expression) 1112 1113 table = expression.find(exp.Table) 1114 1115 # Convert CTAS statement to SELECT .. INTO .. 1116 if kind == "TABLE" and ctas_expression: 1117 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1118 ctas_expression = ctas_expression.subquery() 1119 1120 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1121 select_into.set("into", exp.Into(this=table)) 1122 1123 if like_property: 1124 select_into.limit(0, copy=False) 1125 1126 sql = self.sql(select_into) 1127 1128 if exists: 1129 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1130 sql_with_ctes = self.prepend_ctes(expression, sql) 1131 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1132 if kind == "SCHEMA": 1133 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1134 elif kind == "TABLE": 1135 assert table 1136 where = exp.and_( 1137 exp.column("table_name").eq(table.name), 1138 exp.column("table_schema").eq(table.db) if table.db else None, 1139 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1140 ) 1141 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1142 elif kind == "INDEX": 1143 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1144 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1145 elif expression.args.get("replace"): 1146 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1147 1148 
return self.prepend_ctes(expression, sql) 1149 1150 def count_sql(self, expression: exp.Count) -> str: 1151 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1152 return rename_func(func_name)(self, expression) 1153 1154 def offset_sql(self, expression: exp.Offset) -> str: 1155 return f"{super().offset_sql(expression)} ROWS" 1156 1157 def version_sql(self, expression: exp.Version) -> str: 1158 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1159 this = f"FOR {name}" 1160 expr = expression.expression 1161 kind = expression.text("kind") 1162 if kind in ("FROM", "BETWEEN"): 1163 args = expr.expressions 1164 sep = "TO" if kind == "FROM" else "AND" 1165 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1166 else: 1167 expr_sql = self.sql(expr) 1168 1169 expr_sql = f" {expr_sql}" if expr_sql else "" 1170 return f"{this} {kind}{expr_sql}" 1171 1172 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1173 table = expression.args.get("table") 1174 table = f"{table} " if table else "" 1175 return f"RETURNS {table}{self.sql(expression, 'this')}" 1176 1177 def returning_sql(self, expression: exp.Returning) -> str: 1178 into = self.sql(expression, "into") 1179 into = self.seg(f"INTO {into}") if into else "" 1180 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1181 1182 def transaction_sql(self, expression: exp.Transaction) -> str: 1183 this = self.sql(expression, "this") 1184 this = f" {this}" if this else "" 1185 mark = self.sql(expression, "mark") 1186 mark = f" WITH MARK {mark}" if mark else "" 1187 return f"BEGIN TRANSACTION{this}{mark}" 1188 1189 def commit_sql(self, expression: exp.Commit) -> str: 1190 this = self.sql(expression, "this") 1191 this = f" {this}" if this else "" 1192 durability = expression.args.get("durability") 1193 durability = ( 1194 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1195 if durability is not None 
1196 else "" 1197 ) 1198 return f"COMMIT TRANSACTION{this}{durability}" 1199 1200 def rollback_sql(self, expression: exp.Rollback) -> str: 1201 this = self.sql(expression, "this") 1202 this = f" {this}" if this else "" 1203 return f"ROLLBACK TRANSACTION{this}" 1204 1205 def identifier_sql(self, expression: exp.Identifier) -> str: 1206 identifier = super().identifier_sql(expression) 1207 1208 if expression.args.get("global"): 1209 identifier = f"##{identifier}" 1210 elif expression.args.get("temporary"): 1211 identifier = f"#{identifier}" 1212 1213 return identifier 1214 1215 def constraint_sql(self, expression: exp.Constraint) -> str: 1216 this = self.sql(expression, "this") 1217 expressions = self.expressions(expression, flat=True, sep=" ") 1218 return f"CONSTRAINT {this} {expressions}" 1219 1220 def length_sql(self, expression: exp.Length) -> str: 1221 return self._uncast_text(expression, "LEN") 1222 1223 def right_sql(self, expression: exp.Right) -> str: 1224 return self._uncast_text(expression, "RIGHT") 1225 1226 def left_sql(self, expression: exp.Left) -> str: 1227 return self._uncast_text(expression, "LEFT") 1228 1229 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1230 this = expression.this 1231 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1232 this_sql = self.sql(this, "this") 1233 else: 1234 this_sql = self.sql(this) 1235 expression_sql = self.sql(expression, "expression") 1236 return self.func(name, this_sql, expression_sql if expression_sql else None) 1237 1238 def partition_sql(self, expression: exp.Partition) -> str: 1239 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1240 1241 def alter_sql(self, expression: exp.Alter) -> str: 1242 action = seq_get(expression.args.get("actions") or [], 0) 1243 if isinstance(action, exp.AlterRename): 1244 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1245 return super().alter_sql(expression) 1246 1247 def 
drop_sql(self, expression: exp.Drop) -> str: 1248 if expression.args["kind"] == "VIEW": 1249 expression.this.set("catalog", None) 1250 return super().drop_sql(expression) 1251 1252 def declare_sql(self, expression: exp.Declare) -> str: 1253 return f"DECLARE {self.expressions(expression, flat=True)}" 1254 1255 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1256 variable = self.sql(expression, "this") 1257 default = self.sql(expression, "default") 1258 default = f" = {default}" if default else "" 1259 1260 kind = self.sql(expression, "kind") 1261 if isinstance(expression.args.get("kind"), exp.Schema): 1262 kind = f"TABLE {kind}" 1263 1264 return f"{variable} AS {kind}{default}" 1265 1266 def options_modifier(self, expression: exp.Expression) -> str: 1267 options = self.expressions(expression, key="options") 1268 return f" OPTION{self.wrap(options)}" if options else "" 1269 1270 def dpipe_sql(self, expression: exp.DPipe) -> str: 1271 return self.sql( 1272 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1273 ) 1274 1275 def isascii_sql(self, expression: exp.IsAscii) -> str: 1276 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"
Whether the base argument comes first in the LOG function.
Possible values: True, False, or None (None meaning that two-argument LOG is not supported).
Whether the behavior of a / b depends on the types of a and b.
False means a / b is always float division.
True means a / b is integer division if both a and b are integers.
A NULL argument in CONCAT yields NULL by default, but in some dialects it yields an empty string.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python strftime
formats.
Inherited Members
- sqlglot.dialects.dialect.Dialect
- Dialect
- INDEX_OFFSET
- WEEK_OFFSET
- UNNEST_COLUMN_ONLY
- ALIAS_POST_TABLESAMPLE
- TABLESAMPLE_SIZE_IS_PERCENT
- IDENTIFIERS_CAN_START_WITH_DIGIT
- DPIPE_IS_STRING_CONCAT
- STRICT_STRING_CONCAT
- SUPPORTS_USER_DEFINED_TYPES
- COPY_PARAMS_ARE_CSV
- NORMALIZE_FUNCTIONS
- PRESERVE_ORIGINAL_NAMES
- NULL_ORDERING
- SAFE_DIVISION
- HEX_LOWERCASE
- DATE_FORMAT
- DATEINT_FORMAT
- FORMAT_MAPPING
- UNESCAPED_SEQUENCES
- PSEUDOCOLUMNS
- PREFER_CTE_ALIAS_COLUMN
- FORCE_EARLY_ALIAS_REF_EXPANSION
- EXPAND_ALIAS_REFS_EARLY_ONLY_IN_GROUP_BY
- SUPPORTS_ORDER_BY_ALL
- HAS_DISTINCT_ARRAY_CONSTRUCTORS
- SUPPORTS_FIXED_SIZE_ARRAYS
- STRICT_JSON_PATH_SYNTAX
- ON_CONDITION_EMPTY_BEFORE_ERROR
- ARRAY_AGG_INCLUDES_NULLS
- PROMOTE_TO_INFERRED_DATETIME_TYPE
- SUPPORTS_VALUES_DEFAULT
- NUMBERS_CAN_BE_UNDERSCORE_SEPARATED
- REGEXP_EXTRACT_DEFAULT_GROUP
- SET_OP_DISTINCT_BY_DEFAULT
- CREATABLE_KIND_MAPPING
- DATE_PART_MAPPING
- TYPE_TO_EXPRESSIONS
- ANNOTATORS
- get_or_raise
- format_time
- settings
- normalize_identifier
- case_sensitive
- can_identify
- quote_identifier
- to_json_path
- parse
- parse_into
- generate
- transpile
- tokenize
- tokenizer
- jsonpath_tokenizer
- parser
- generator
class Tokenizer(tokens.Tokenizer):
    """T-SQL tokenizer configuration: bracketed identifiers, 0x hex literals,
    and the dialect's extra keyword/type vocabulary."""

    # Identifiers may be [bracketed] or "double-quoted"; strings may use either quote.
    IDENTIFIERS = [("[", "]"), '"']
    QUOTES = ["'", '"']
    # Hex literals are written 0xABCD with no closing delimiter.
    HEX_STRINGS = [("0x", ""), ("0X", "")]
    # @variables, $identifiers and #temp names start with a single special character.
    VAR_SINGLE_TOKENS = {"@", "$", "#"}

    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
        # Index / statement keywords
        "CLUSTERED INDEX": TokenType.INDEX,
        "NONCLUSTERED INDEX": TokenType.INDEX,
        "DECLARE": TokenType.DECLARE,
        "EXEC": TokenType.COMMAND,
        "PRINT": TokenType.COMMAND,
        "UPDATE STATISTICS": TokenType.COMMAND,
        "PROC": TokenType.PROCEDURE,
        "OPTION": TokenType.OPTION,
        "OUTPUT": TokenType.RETURNING,
        "TOP": TokenType.TOP,
        "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
        "SYSTEM_USER": TokenType.CURRENT_USER,
        # Type keywords
        "DATETIME2": TokenType.DATETIME2,
        "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
        "IMAGE": TokenType.IMAGE,
        "MONEY": TokenType.MONEY,
        "NTEXT": TokenType.TEXT,
        "REAL": TokenType.FLOAT,
        "ROWVERSION": TokenType.ROWVERSION,
        "SMALLDATETIME": TokenType.SMALLDATETIME,
        "SMALLMONEY": TokenType.SMALLMONEY,
        "SQL_VARIANT": TokenType.VARIANT,
        "TIMESTAMP": TokenType.ROWVERSION,
        "TINYINT": TokenType.UTINYINT,
        "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER,
        "XML": TokenType.XML,
    }
    # T-SQL has no /*+ hint */ comment syntax.
    KEYWORDS.pop("/*+")

    # END terminates BEGIN...END blocks, which are consumed as raw commands.
    COMMANDS = tokens.Tokenizer.COMMANDS | {TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- use_rs_tokenizer
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
class Parser(parser.Parser):
    """Parser for the T-SQL dialect."""

    SET_REQUIRES_ASSIGNMENT_DELIMITER = False
    LOG_DEFAULTS_TO_LN = True
    ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
    STRING_ALIASES = True
    NO_PAREN_IF_COMMANDS = False

    QUERY_MODIFIER_PARSERS = {
        **parser.Parser.QUERY_MODIFIER_PARSERS,
        # Trailing OPTION(...) query hints.
        TokenType.OPTION: lambda self: ("options", self._parse_options()),
    }

    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,
        # CHARINDEX(substr, string[, start]) -> StrPosition(string, substr, start)
        "CHARINDEX": lambda args: exp.StrPosition(
            this=seq_get(args, 1),
            substr=seq_get(args, 0),
            position=seq_get(args, 2),
        ),
        # big_int distinguishes COUNT (INT) from COUNT_BIG (BIGINT).
        "COUNT": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=False
        ),
        "COUNT_BIG": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=True
        ),
        "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
        "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
        "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
        "DATEPART": _build_formatted_time(exp.TimeToStr),
        "DATETIMEFROMPARTS": _build_datetimefromparts,
        "EOMONTH": _build_eomonth,
        "FORMAT": _build_format,
        "GETDATE": exp.CurrentTimestamp.from_arg_list,
        "HASHBYTES": _build_hashbytes,
        "ISNULL": build_coalesce,
        "JSON_QUERY": _build_json_query,
        "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
        "LEN": _build_with_arg_as_text(exp.Length),
        "LEFT": _build_with_arg_as_text(exp.Left),
        "RIGHT": _build_with_arg_as_text(exp.Right),
        "PARSENAME": _build_parsename,
        "REPLICATE": exp.Repeat.from_arg_list,
        "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
        "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
        "SUSER_NAME": exp.CurrentUser.from_arg_list,
        "SUSER_SNAME": exp.CurrentUser.from_arg_list,
        "SYSTEM_USER": exp.CurrentUser.from_arg_list,
        "TIMEFROMPARTS": _build_timefromparts,
        "DATETRUNC": _build_datetrunc,
    }

    JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

    # WITH <option> clauses allowed on CREATE PROCEDURE; none take arguments.
    PROCEDURE_OPTIONS = dict.fromkeys(
        ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple()
    )

    # Tokens that may name the table variable in RETURNS @t TABLE (...).
    RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
        TokenType.TABLE,
        *parser.Parser.TYPE_TOKENS,
    }

    STATEMENT_PARSERS = {
        **parser.Parser.STATEMENT_PARSERS,
        TokenType.DECLARE: lambda self: self._parse_declare(),
    }

    RANGE_PARSERS = {
        **parser.Parser.RANGE_PARSERS,
        TokenType.DCOLON: lambda self, this: self.expression(
            exp.ScopeResolution,
            this=this,
            expression=self._parse_function() or self._parse_var(any_token=True),
        ),
    }

    NO_PAREN_FUNCTION_PARSERS = {
        **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
        "NEXT": lambda self: self._parse_next_value_for(),
    }

    # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
    COLUMN_OPERATORS = {
        **parser.Parser.COLUMN_OPERATORS,
        TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
        if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
        else self.expression(exp.ScopeResolution, this=this, expression=to),
    }

    def _parse_dcolon(self) -> t.Optional[exp.Expression]:
        # We want to use _parse_types() if the first token after :: is a known type,
        # otherwise we could parse something like x::varchar(max) into a function
        if self._match_set(self.TYPE_TOKENS, advance=False):
            return self._parse_types()

        return self._parse_function() or self._parse_types()

    def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
        """Parse an OPTION(<opt> [= <value>], ...) query-hint clause, if present."""
        if not self._match(TokenType.OPTION):
            return None

        def _parse_option() -> t.Optional[exp.Expression]:
            option = self._parse_var_from_options(OPTIONS)
            if not option:
                return None

            # The '=' between an option and its value is optional.
            self._match(TokenType.EQ)
            return self.expression(
                exp.QueryOption, this=option, expression=self._parse_primary_or_var()
            )

        return self._parse_wrapped_csv(_parse_option)

    def _parse_projections(self) -> t.List[exp.Expression]:
        """
        T-SQL supports the syntax alias = expression in the SELECT's projection list,
        so we transform all parsed Selects to convert their EQ projections into Aliases.

        See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
        """
        return [
            (
                exp.alias_(projection.expression, projection.this.this, copy=False)
                if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                else projection
            )
            for projection in super()._parse_projections()
        ]

    def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
        """Applies to SQL Server and Azure SQL Database
        COMMIT [ { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable ] ]
            [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

        ROLLBACK { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable
            | savepoint_name | @savepoint_variable ]
        """
        rollback = self._prev.token_type == TokenType.ROLLBACK

        self._match_texts(("TRAN", "TRANSACTION"))
        this = self._parse_id_var()

        if rollback:
            return self.expression(exp.Rollback, this=this)

        durability = None
        if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
            self._match_text_seq("DELAYED_DURABILITY")
            self._match(TokenType.EQ)

            if self._match_text_seq("OFF"):
                durability = False
            else:
                self._match(TokenType.ON)
                durability = True

            self._match_r_paren()

        return self.expression(exp.Commit, this=this, durability=durability)

    def _parse_transaction(self) -> exp.Transaction | exp.Command:
        """Applies to SQL Server and Azure SQL Database
        BEGIN { TRAN | TRANSACTION }
          [ { transaction_name | @tran_name_variable }
          [ WITH MARK [ 'description' ] ]
        ]
        """
        if self._match_texts(("TRAN", "TRANSACTION")):
            transaction = self.expression(exp.Transaction, this=self._parse_id_var())
            if self._match_text_seq("WITH", "MARK"):
                transaction.set("mark", self._parse_string())

            return transaction

        # Any other BEGIN ... form is kept as an opaque command.
        return self._parse_as_command(self._prev)

    def _parse_returns(self) -> exp.ReturnsProperty:
        # RETURNS @var TABLE (...): capture the optional table variable name.
        table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
        returns = super()._parse_returns()
        returns.set("table", table)
        return returns

    def _parse_convert(
        self, strict: bool, safe: t.Optional[bool] = None
    ) -> t.Optional[exp.Expression]:
        """Parse CONVERT/TRY_CONVERT(<type>, <expr>[, <style>])."""
        this = self._parse_types()
        self._match(TokenType.COMMA)
        args = [this, *self._parse_csv(self._parse_assignment)]
        convert = exp.Convert.from_arg_list(args)
        convert.set("safe", safe)
        convert.set("strict", strict)
        return convert

    def _parse_user_defined_function(
        self, kind: t.Optional[TokenType] = None
    ) -> t.Optional[exp.Expression]:
        """Parse procedure/function parameter lists, which may omit parentheses."""
        this = super()._parse_user_defined_function(kind=kind)

        if (
            kind == TokenType.FUNCTION
            or isinstance(this, exp.UserDefinedFunction)
            or self._match(TokenType.ALIAS, advance=False)
        ):
            return this

        # Procedure parameters can follow the name directly, without parentheses.
        if not self._match(TokenType.WITH, advance=False):
            expressions = self._parse_csv(self._parse_function_parameter)
        else:
            expressions = None

        return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

    def _parse_id_var(
        self,
        any_token: bool = True,
        tokens: t.Optional[t.Collection[TokenType]] = None,
    ) -> t.Optional[exp.Expression]:
        """Parse an identifier, tracking leading # (temporary) / ## (global temp) markers."""
        is_temporary = self._match(TokenType.HASH)
        is_global = is_temporary and self._match(TokenType.HASH)

        this = super()._parse_id_var(any_token=any_token, tokens=tokens)
        if this:
            if is_global:
                this.set("global", True)
            elif is_temporary:
                this.set("temporary", True)

        return this

    def _parse_create(self) -> exp.Create | exp.Command:
        create = super()._parse_create()

        if isinstance(create, exp.Create):
            # A '#'-prefixed table name implies a temp table; surface that as a property.
            table = create.this.this if isinstance(create.this, exp.Schema) else create.this
            if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
                if not create.args.get("properties"):
                    create.set("properties", exp.Properties(expressions=[]))

                create.args["properties"].append("expressions", exp.TemporaryProperty())

        return create

    def _parse_if(self) -> t.Optional[exp.Expression]:
        """Recognize the IF OBJECT_ID(...) IS NOT NULL DROP ... existence-check idiom."""
        index = self._index

        if self._match_text_seq("OBJECT_ID"):
            self._parse_wrapped_csv(self._parse_string)
            if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                return self._parse_drop(exists=True)
        # Not the idiom: rewind and parse as a regular IF.
        self._retreat(index)

        return super()._parse_if()

    def _parse_unique(self) -> exp.UniqueColumnConstraint:
        # UNIQUE [CLUSTERED | NONCLUSTERED] constraint.
        if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
            this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
        else:
            this = self._parse_schema(self._parse_id_var(any_token=False))

        return self.expression(exp.UniqueColumnConstraint, this=this)

    def _parse_partition(self) -> t.Optional[exp.Partition]:
        """Parse WITH (PARTITIONS (<n> [TO <m>], ...)) used e.g. in index hints."""
        if not self._match_text_seq("WITH", "(", "PARTITIONS"):
            return None

        def parse_range():
            low = self._parse_bitwise()
            high = self._parse_bitwise() if self._match_text_seq("TO") else None

            return (
                self.expression(exp.PartitionRange, this=low, expression=high) if high else low
            )

        partition = self.expression(
            exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
        )

        self._match_r_paren()

        return partition

    def _parse_declare(self) -> exp.Declare | exp.Command:
        """Parse DECLARE, falling back to a raw command if the item list doesn't parse fully."""
        index = self._index
        expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem))

        if not expressions or self._curr:
            # Leftover tokens mean we only understood part of it: keep it verbatim.
            self._retreat(index)
            return self._parse_as_command(self._prev)

        return self.expression(exp.Declare, expressions=expressions)

    def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
        """Parse one DECLARE item: @var [AS] <type> [= value] or @var [AS] TABLE (...)."""
        var = self._parse_id_var()
        if not var:
            return None

        value = None
        self._match(TokenType.ALIAS)
        if self._match(TokenType.TABLE):
            data_type = self._parse_schema()
        else:
            data_type = self._parse_types()
            if self._match(TokenType.EQ):
                value = self._parse_bitwise()

        return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- INTERVAL_VARS
- ALIAS_TOKENS
- ARRAY_CONSTRUCTORS
- COMMENT_TABLE_ALIAS_TOKENS
- UPDATE_ALIAS_TOKENS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- FUNCTION_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- NULL_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ODBC_DATETIME_LITERALS
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- ANALYZE_STYLES
- ANALYZE_EXPRESSION_PARSERS
- PARTITION_KEYWORDS
- AMBIGUOUS_ALIAS_TOKENS
- OPERATION_MODIFIERS
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- SET_OP_MODIFIERS
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- OPTIONAL_ALIAS_TOKEN_CTE
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- errors
- sql
846 class Generator(generator.Generator): 847 LIMIT_IS_TOP = True 848 QUERY_HINTS = False 849 RETURNING_END = False 850 NVL2_SUPPORTED = False 851 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 852 LIMIT_FETCH = "FETCH" 853 COMPUTED_COLUMN_WITH_TYPE = False 854 CTE_RECURSIVE_KEYWORD_REQUIRED = False 855 ENSURE_BOOLS = True 856 NULL_ORDERING_SUPPORTED = None 857 SUPPORTS_SINGLE_ARG_CONCAT = False 858 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 859 SUPPORTS_SELECT_INTO = True 860 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 861 SUPPORTS_TO_NUMBER = False 862 SET_OP_MODIFIERS = False 863 COPY_PARAMS_EQ_REQUIRED = True 864 PARSE_JSON_NAME = None 865 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 866 867 EXPRESSIONS_WITHOUT_NESTED_CTES = { 868 exp.Create, 869 exp.Delete, 870 exp.Insert, 871 exp.Intersect, 872 exp.Except, 873 exp.Merge, 874 exp.Select, 875 exp.Subquery, 876 exp.Union, 877 exp.Update, 878 } 879 880 SUPPORTED_JSON_PATH_PARTS = { 881 exp.JSONPathKey, 882 exp.JSONPathRoot, 883 exp.JSONPathSubscript, 884 } 885 886 TYPE_MAPPING = { 887 **generator.Generator.TYPE_MAPPING, 888 exp.DataType.Type.BOOLEAN: "BIT", 889 exp.DataType.Type.DATETIME2: "DATETIME2", 890 exp.DataType.Type.DECIMAL: "NUMERIC", 891 exp.DataType.Type.DOUBLE: "FLOAT", 892 exp.DataType.Type.INT: "INTEGER", 893 exp.DataType.Type.ROWVERSION: "ROWVERSION", 894 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 895 exp.DataType.Type.TIMESTAMP: "DATETIME2", 896 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 897 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 898 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 899 exp.DataType.Type.UTINYINT: "TINYINT", 900 exp.DataType.Type.VARIANT: "SQL_VARIANT", 901 } 902 903 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 904 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 905 906 TRANSFORMS = { 907 **generator.Generator.TRANSFORMS, 908 exp.AnyValue: any_value_to_max_sql, 909 exp.ArrayToString: rename_func("STRING_AGG"), 910 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 911 exp.Chr: 
rename_func("CHAR"), 912 exp.DateAdd: date_delta_sql("DATEADD"), 913 exp.DateDiff: date_delta_sql("DATEDIFF"), 914 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 915 exp.CurrentDate: rename_func("GETDATE"), 916 exp.CurrentTimestamp: rename_func("GETDATE"), 917 exp.DateStrToDate: datestrtodate_sql, 918 exp.Extract: rename_func("DATEPART"), 919 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 920 exp.GroupConcat: _string_agg_sql, 921 exp.If: rename_func("IIF"), 922 exp.JSONExtract: _json_extract_sql, 923 exp.JSONExtractScalar: _json_extract_sql, 924 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 925 exp.Ln: rename_func("LOG"), 926 exp.Max: max_or_greatest, 927 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 928 exp.Min: min_or_least, 929 exp.NumberToStr: _format_sql, 930 exp.Repeat: rename_func("REPLICATE"), 931 exp.Select: transforms.preprocess( 932 [ 933 transforms.eliminate_distinct_on, 934 transforms.eliminate_semi_and_anti_joins, 935 transforms.eliminate_qualify, 936 transforms.unnest_generate_date_array_using_recursive_cte, 937 ] 938 ), 939 exp.Stddev: rename_func("STDEV"), 940 exp.StrPosition: lambda self, e: strposition_sql( 941 self, e, func_name="CHARINDEX", supports_position=True 942 ), 943 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 944 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 945 exp.SHA2: lambda self, e: self.func( 946 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 947 ), 948 exp.TemporaryProperty: lambda self, e: "", 949 exp.TimeStrToTime: _timestrtotime_sql, 950 exp.TimeToStr: _format_sql, 951 exp.Trim: trim_sql, 952 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 953 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 954 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 955 exp.DateFromParts: rename_func("DATEFROMPARTS"), 956 } 957 
    # ReturnsProperty is rendered via returnsproperty_sql below, not via TRANSFORMS.
    TRANSFORMS.pop(exp.ReturnsProperty)

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        # T-SQL has no VOLATILE table property.
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    def scope_resolution(self, rhs: str, scope_name: str) -> str:
        """Render T-SQL's scope resolution operator, e.g. hierarchyid::GetRoot()."""
        return f"{scope_name}::{rhs}"

    def select_sql(self, expression: exp.Select) -> str:
        """Generate SELECT, adapting OFFSET/LIMIT to T-SQL's ORDER BY ... OFFSET ... FETCH form."""
        if expression.args.get("offset"):
            if not expression.args.get("order"):
                # ORDER BY is required in order to use OFFSET in a query, so we use
                # a noop order by, since we don't really care about the order.
                # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            limit = expression.args.get("limit")
            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, we need to use FETCH instead of TOP
                # we replace here because otherwise TOP would be generated in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

        return super().select_sql(expression)

    def convert_sql(self, expression: exp.Convert) -> str:
        """Render CONVERT / TRY_CONVERT with its optional style argument."""
        name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
        return self.func(
            name, expression.this, expression.expression, expression.args.get("style")
        )

    def queryoption_sql(self, expression: exp.QueryOption) -> str:
        """Render an OPTION(...) query hint, adding '=' only where T-SQL requires it."""
        option = self.sql(expression, "this")
        value = self.sql(expression, "expression")
        if value:
            optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
            return f"{option} {optional_equal_sign}{value}"
        return option

    def lateral_op(self, expression: exp.Lateral) -> str:
        """Map LATERAL joins to CROSS APPLY / OUTER APPLY; bare LATERAL is unsupported."""
        cross_apply = expression.args.get("cross_apply")
        if cross_apply is True:
            return "CROSS APPLY"
        if cross_apply is False:
            return "OUTER APPLY"

        # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
        self.unsupported("LATERAL clause is not supported.")
        return "LATERAL"

    def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
        """Transpile SPLIT_PART to PARSENAME.

        Only possible for a literal string, '.' delimiter and at most 4 parts,
        since PARSENAME is limited to object names of up to four parts.
        """
        this = expression.this
        split_count = len(this.name.split("."))
        delimiter = expression.args.get("delimiter")
        part_index = expression.args.get("part_index")

        if (
            not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
            or (delimiter and delimiter.name != ".")
            or not part_index
            or split_count > 4
        ):
            self.unsupported(
                "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
            )
            return ""

        # PARSENAME counts parts from the right, so the index must be inverted.
        return self.func(
            "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
        )

    def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
        """Render TIMEFROMPARTS; drops nanoseconds and defaults fractions/precision to 0."""
        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

        # TIMEFROMPARTS requires all five arguments, so fill in the optional ones.
        if expression.args.get("fractions") is None:
            expression.set("fractions", exp.Literal.number(0))
        if expression.args.get("precision") is None:
            expression.set("precision", exp.Literal.number(0))

        return rename_func("TIMEFROMPARTS")(self, expression)

    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
        """Render DATETIMEFROMPARTS; drops zone/nanoseconds and defaults milliseconds to 0."""
        zone = expression.args.get("zone")
        if zone is not None:
            zone.pop()
            self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

        # DATETIMEFROMPARTS requires the milliseconds argument.
        if expression.args.get("milli") is None:
            expression.set("milli", exp.Literal.number(0))

        return rename_func("DATETIMEFROMPARTS")(self, expression)

    def setitem_sql(self, expression: exp.SetItem) -> str:
        this = expression.this
        if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
            # T-SQL does not use '=' in SET command, except when the LHS is a variable.
            return f"{self.sql(this.left)} {self.sql(this.right)}"

        return super().setitem_sql(expression)

    def boolean_sql(self, expression: exp.Boolean) -> str:
        """T-SQL has no boolean literals: emit bit values (1/0) in BIT contexts and
        always-true/always-false predicates elsewhere."""
        if type(expression.parent) in BIT_TYPES or isinstance(
            expression.find_ancestor(exp.Values, exp.Select), exp.Values
        ):
            return "1" if expression.this else "0"

        return "(1 = 1)" if expression.this else "(1 = 0)"

    def is_sql(self, expression: exp.Is) -> str:
        # IS TRUE / IS FALSE is not valid T-SQL; compare against the bit value instead.
        if isinstance(expression.expression, exp.Boolean):
            return self.binary(expression, "=")
        return self.binary(expression, "IS")

    def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
        """Render the created object's name, prefixing '#' for temporary tables."""
        sql = self.sql(expression, "this")
        properties = expression.args.get("properties")

        if sql[:1] != "#" and any(
            isinstance(prop, exp.TemporaryProperty)
            for prop in (properties.expressions if properties else [])
        ):
            # Keep the '#' inside the brackets when the name is quoted.
            sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

        return sql

    def create_sql(self, expression: exp.Create) -> str:
        """Generate CREATE statements, handling T-SQL specifics:

        - CTAS becomes SELECT ... INTO ...
        - IF NOT EXISTS becomes an IF NOT EXISTS (...) EXEC('...') wrapper
        - CREATE OR REPLACE becomes CREATE OR ALTER
        """
        kind = expression.kind
        exists = expression.args.pop("exists", None)

        like_property = expression.find(exp.LikeProperty)
        if like_property:
            ctas_expression = like_property.this
        else:
            ctas_expression = expression.expression

        if kind == "VIEW":
            # View names may not be catalog-qualified in T-SQL.
            expression.this.set("catalog", None)
            with_ = expression.args.get("with")
            if ctas_expression and with_:
                # We've already preprocessed the Create expression to bubble up any nested CTEs,
                # but CREATE VIEW actually requires the WITH clause to come after it so we need
                # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                ctas_expression.set("with", with_.pop())

        sql = super().create_sql(expression)

        table = expression.find(exp.Table)

        # Convert CTAS statement to SELECT .. INTO ..
        if kind == "TABLE" and ctas_expression:
            if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                ctas_expression = ctas_expression.subquery()

            select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
            select_into.set("into", exp.Into(this=table))

            if like_property:
                # CREATE ... LIKE copies only the structure, so select zero rows.
                select_into.limit(0, copy=False)

            sql = self.sql(select_into)

        if exists:
            # Emulate IF NOT EXISTS by probing the catalog views and EXEC'ing the DDL.
            identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
            sql_with_ctes = self.prepend_ctes(expression, sql)
            sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
            if kind == "SCHEMA":
                return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})"""
            elif kind == "TABLE":
                assert table
                where = exp.and_(
                    exp.column("table_name").eq(table.name),
                    exp.column("table_schema").eq(table.db) if table.db else None,
                    exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
                )
                return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})"""
            elif kind == "INDEX":
                index = self.sql(exp.Literal.string(expression.this.text("this")))
                return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
        elif expression.args.get("replace"):
            sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

        return self.prepend_ctes(expression, sql)

    def count_sql(self, expression: exp.Count) -> str:
        # COUNT_BIG returns a BIGINT where COUNT returns an INT.
        func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
        return rename_func(func_name)(self, expression)

    def offset_sql(self, expression: exp.Offset) -> str:
        # T-SQL requires the ROWS keyword after OFFSET <n>.
        return f"{super().offset_sql(expression)} ROWS"

    def version_sql(self, expression: exp.Version) -> str:
        """Render temporal-table clauses, e.g. FOR SYSTEM_TIME AS OF / FROM ... TO ... ."""
        name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
        this = f"FOR {name}"
        expr = expression.expression
        kind = expression.text("kind")
        if kind in ("FROM", "BETWEEN"):
            args = expr.expressions
            # FROM pairs with TO, BETWEEN pairs with AND.
            sep = "TO" if kind == "FROM" else "AND"
            expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
        else:
            expr_sql = self.sql(expr)

        expr_sql = f" {expr_sql}" if expr_sql else ""
        return f"{this} {kind}{expr_sql}"

    def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
        """Render a function's RETURNS clause, including table-valued returns."""
        table = expression.args.get("table")
        table = f"{table} " if table else ""
        return f"RETURNS {table}{self.sql(expression, 'this')}"

    def returning_sql(self, expression: exp.Returning) -> str:
        """Render RETURNING as T-SQL's OUTPUT [... INTO ...] clause."""
        into = self.sql(expression, "into")
        into = self.seg(f"INTO {into}") if into else ""
        return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

    def transaction_sql(self, expression: exp.Transaction) -> str:
        """Render BEGIN TRANSACTION [name] [WITH MARK 'description']."""
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        mark = self.sql(expression, "mark")
        mark = f" WITH MARK {mark}" if mark else ""
        return f"BEGIN TRANSACTION{this}{mark}"

    def commit_sql(self, expression: exp.Commit) -> str:
        """Render COMMIT TRANSACTION, with the optional DELAYED_DURABILITY option."""
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        durability = expression.args.get("durability")
        durability = (
            f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
            if durability is not None
            else ""
        )
        return f"COMMIT TRANSACTION{this}{durability}"

    def rollback_sql(self, expression: exp.Rollback) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"ROLLBACK TRANSACTION{this}"

    def identifier_sql(self, expression: exp.Identifier) -> str:
        """Render an identifier, prefixing '##' for global and '#' for local temp objects."""
        identifier = super().identifier_sql(expression)

        if expression.args.get("global"):
            identifier = f"##{identifier}"
        elif expression.args.get("temporary"):
            identifier = f"#{identifier}"

        return identifier

    def constraint_sql(self, expression: exp.Constraint) -> str:
        this = self.sql(expression, "this")
        # Constraint options are space-separated in T-SQL, not comma-separated.
        expressions = self.expressions(expression, flat=True, sep=" ")
        return f"CONSTRAINT {this} {expressions}"

    def length_sql(self, expression: exp.Length) -> str:
        return self._uncast_text(expression, "LEN")

    def right_sql(self, expression: exp.Right) -> str:
        return self._uncast_text(expression, "RIGHT")

    def left_sql(self, expression: exp.Left) -> str:
        return self._uncast_text(expression, "LEFT")

    def _uncast_text(self, expression: exp.Expression, name: str) -> str:
        """Render `name(...)`, dropping a redundant CAST(... AS TEXT) on the first argument."""
        this = expression.this
        if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
            this_sql = self.sql(this, "this")
        else:
            this_sql = self.sql(this)
        expression_sql = self.sql(expression, "expression")
        return self.func(name, this_sql, expression_sql if expression_sql else None)

    def partition_sql(self, expression: exp.Partition) -> str:
        return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

    def alter_sql(self, expression: exp.Alter) -> str:
        # T-SQL renames objects via the sp_rename stored procedure, not ALTER ... RENAME.
        action = seq_get(expression.args.get("actions") or [], 0)
        if isinstance(action, exp.AlterRename):
            return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
        return super().alter_sql(expression)

    def drop_sql(self, expression: exp.Drop) -> str:
        if expression.args["kind"] == "VIEW":
            # View names may not be catalog-qualified in T-SQL.
            expression.this.set("catalog", None)
        return super().drop_sql(expression)

    def declare_sql(self, expression: exp.Declare) -> str:
        return f"DECLARE {self.expressions(expression, flat=True)}"

    def declareitem_sql(self, expression: exp.DeclareItem) -> str:
        """Render one DECLARE item: @var AS <type> [= default], or AS TABLE (...) for table variables."""
        variable = self.sql(expression, "this")
        default = self.sql(expression, "default")
        default = f" = {default}" if default else ""

        kind = self.sql(expression, "kind")
        if isinstance(expression.args.get("kind"), exp.Schema):
            kind = f"TABLE {kind}"

        return f"{variable} AS {kind}{default}"

    def options_modifier(self, expression: exp.Expression) -> str:
        options = self.expressions(expression, key="options")
        return f" OPTION{self.wrap(options)}" if options else ""

    def dpipe_sql(self, expression: exp.DPipe) -> str:
        # T-SQL has no || operator; fold the concat chain into + additions.
        return self.sql(
            reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
        )

    def isascii_sql(self, expression: exp.IsAscii) -> str:
        # True iff no character outside the 0x00-0x7F range matches the PATINDEX pattern.
        return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
- normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default: ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3.
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
968 def select_sql(self, expression: exp.Select) -> str: 969 if expression.args.get("offset"): 970 if not expression.args.get("order"): 971 # ORDER BY is required in order to use OFFSET in a query, so we use 972 # a noop order by, since we don't really care about the order. 973 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 974 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 975 976 limit = expression.args.get("limit") 977 if isinstance(limit, exp.Limit): 978 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 979 # we replace here because otherwise TOP would be generated in select_sql 980 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 981 982 return super().select_sql(expression)
990 def queryoption_sql(self, expression: exp.QueryOption) -> str: 991 option = self.sql(expression, "this") 992 value = self.sql(expression, "expression") 993 if value: 994 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 995 return f"{option} {optional_equal_sign}{value}" 996 return option
998 def lateral_op(self, expression: exp.Lateral) -> str: 999 cross_apply = expression.args.get("cross_apply") 1000 if cross_apply is True: 1001 return "CROSS APPLY" 1002 if cross_apply is False: 1003 return "OUTER APPLY" 1004 1005 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1006 self.unsupported("LATERAL clause is not supported.") 1007 return "LATERAL"
1009 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1010 this = expression.this 1011 split_count = len(this.name.split(".")) 1012 delimiter = expression.args.get("delimiter") 1013 part_index = expression.args.get("part_index") 1014 1015 if ( 1016 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1017 or (delimiter and delimiter.name != ".") 1018 or not part_index 1019 or split_count > 4 1020 ): 1021 self.unsupported( 1022 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1023 ) 1024 return "" 1025 1026 return self.func( 1027 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1028 )
1030 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1031 nano = expression.args.get("nano") 1032 if nano is not None: 1033 nano.pop() 1034 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1035 1036 if expression.args.get("fractions") is None: 1037 expression.set("fractions", exp.Literal.number(0)) 1038 if expression.args.get("precision") is None: 1039 expression.set("precision", exp.Literal.number(0)) 1040 1041 return rename_func("TIMEFROMPARTS")(self, expression)
1043 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1044 zone = expression.args.get("zone") 1045 if zone is not None: 1046 zone.pop() 1047 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1048 1049 nano = expression.args.get("nano") 1050 if nano is not None: 1051 nano.pop() 1052 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1053 1054 if expression.args.get("milli") is None: 1055 expression.set("milli", exp.Literal.number(0)) 1056 1057 return rename_func("DATETIMEFROMPARTS")(self, expression)
1059 def setitem_sql(self, expression: exp.SetItem) -> str: 1060 this = expression.this 1061 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1062 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1063 return f"{self.sql(this.left)} {self.sql(this.right)}" 1064 1065 return super().setitem_sql(expression)
1080 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1081 sql = self.sql(expression, "this") 1082 properties = expression.args.get("properties") 1083 1084 if sql[:1] != "#" and any( 1085 isinstance(prop, exp.TemporaryProperty) 1086 for prop in (properties.expressions if properties else []) 1087 ): 1088 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1089 1090 return sql
1092 def create_sql(self, expression: exp.Create) -> str: 1093 kind = expression.kind 1094 exists = expression.args.pop("exists", None) 1095 1096 like_property = expression.find(exp.LikeProperty) 1097 if like_property: 1098 ctas_expression = like_property.this 1099 else: 1100 ctas_expression = expression.expression 1101 1102 if kind == "VIEW": 1103 expression.this.set("catalog", None) 1104 with_ = expression.args.get("with") 1105 if ctas_expression and with_: 1106 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1107 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1108 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1109 ctas_expression.set("with", with_.pop()) 1110 1111 sql = super().create_sql(expression) 1112 1113 table = expression.find(exp.Table) 1114 1115 # Convert CTAS statement to SELECT .. INTO .. 1116 if kind == "TABLE" and ctas_expression: 1117 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1118 ctas_expression = ctas_expression.subquery() 1119 1120 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1121 select_into.set("into", exp.Into(this=table)) 1122 1123 if like_property: 1124 select_into.limit(0, copy=False) 1125 1126 sql = self.sql(select_into) 1127 1128 if exists: 1129 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1130 sql_with_ctes = self.prepend_ctes(expression, sql) 1131 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1132 if kind == "SCHEMA": 1133 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1134 elif kind == "TABLE": 1135 assert table 1136 where = exp.and_( 1137 exp.column("table_name").eq(table.name), 1138 exp.column("table_schema").eq(table.db) if table.db else None, 1139 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1140 ) 1141 return f"""IF NOT EXISTS 
(SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1142 elif kind == "INDEX": 1143 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1144 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1145 elif expression.args.get("replace"): 1146 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1147 1148 return self.prepend_ctes(expression, sql)
1157 def version_sql(self, expression: exp.Version) -> str: 1158 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1159 this = f"FOR {name}" 1160 expr = expression.expression 1161 kind = expression.text("kind") 1162 if kind in ("FROM", "BETWEEN"): 1163 args = expr.expressions 1164 sep = "TO" if kind == "FROM" else "AND" 1165 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1166 else: 1167 expr_sql = self.sql(expr) 1168 1169 expr_sql = f" {expr_sql}" if expr_sql else "" 1170 return f"{this} {kind}{expr_sql}"
1189 def commit_sql(self, expression: exp.Commit) -> str: 1190 this = self.sql(expression, "this") 1191 this = f" {this}" if this else "" 1192 durability = expression.args.get("durability") 1193 durability = ( 1194 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1195 if durability is not None 1196 else "" 1197 ) 1198 return f"COMMIT TRANSACTION{this}{durability}"
1205 def identifier_sql(self, expression: exp.Identifier) -> str: 1206 identifier = super().identifier_sql(expression) 1207 1208 if expression.args.get("global"): 1209 identifier = f"##{identifier}" 1210 elif expression.args.get("temporary"): 1211 identifier = f"#{identifier}" 1212 1213 return identifier
1255 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1256 variable = self.sql(expression, "this") 1257 default = self.sql(expression, "default") 1258 default = f" = {default}" if default else "" 1259 1260 kind = self.sql(expression, "kind") 1261 if isinstance(expression.args.get("kind"), exp.Schema): 1262 kind = f"TABLE {kind}" 1263 1264 return f"{variable} AS {kind}{default}"
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- SUPPORTS_MEDIAN
- SUPPORTS_UNIX_SECONDS
- ARRAY_SIZE_NAME
- ARRAY_SIZE_DIM_REQUIRED
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- transformcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- groupingsets_sql
- rollup_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- safedivide_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- try_sql
- log_sql
- use_sql
- binary
- ceil_floor
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- whens_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- uniquekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodatetime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- arrayconcat_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonextractquote_sql
- jsonexists_sql
- arrayagg_sql
- apply_sql
- grant_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql
- string_sql
- median_sql
- overflowtruncatebehavior_sql
- unixseconds_sql
- arraysize_sql
- attach_sql
- detach_sql
- attachoption_sql
- featuresattime_sql
- watermarkcolumnconstraint_sql
- encodeproperty_sql
- includeproperty_sql
- xmlelement_sql
- partitionbyrangeproperty_sql
- partitionbyrangepropertydynamic_sql
- unpivotcolumns_sql
- analyzesample_sql
- analyzestatistics_sql
- analyzehistogram_sql
- analyzedelete_sql
- analyzelistchainedrows_sql
- analyzevalidate_sql
- analyze_sql
- xmltable_sql
- xmlnamespace_sql