sqlglot.dialects.tsql
from __future__ import annotations

import datetime
import re
import typing as t
from functools import partial, reduce

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    NormalizationStrategy,
    any_value_to_max_sql,
    date_delta_sql,
    datestrtodate_sql,
    generatedasidentitycolumnconstraint_sql,
    max_or_greatest,
    min_or_least,
    build_date_delta,
    rename_func,
    trim_sql,
    timestrtotime_sql,
)
from sqlglot.helper import seq_get
from sqlglot.parser import build_coalesce
from sqlglot.time import format_time
from sqlglot.tokens import TokenType

if t.TYPE_CHECKING:
    from sqlglot._typing import E

# T-SQL date-part names/abbreviations mapped to "full name" strftime directives
# (%A = full weekday name, %B = full month name). Merged over TIME_MAPPING by
# _build_formatted_time when full_format_mapping is set (i.e. for DATENAME,
# which returns names rather than numbers).
FULL_FORMAT_TIME_MAPPING = {
    "weekday": "%A",
    "dw": "%A",
    "w": "%A",
    "month": "%B",
    "mm": "%B",
    "m": "%B",
}

# Normalizes T-SQL DATEADD/DATEDIFF unit abbreviations to canonical unit names.
DATE_DELTA_INTERVAL = {
    "year": "year",
    "yyyy": "year",
    "yy": "year",
    "quarter": "quarter",
    "qq": "quarter",
    "q": "quarter",
    "month": "month",
    "mm": "month",
    "m": "month",
    "week": "week",
    "ww": "week",
    "wk": "week",
    "day": "day",
    "dd": "day",
    "d": "day",
}


# Matches day/month/year/hour/second tokens inside a FORMAT() format string; a
# format with none of these is treated as numeric (see _build_format).
DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})")

# N = Numeric, C=Currency
TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"}

# Integer DATETIME values in T-SQL are day offsets from this date
# (see _build_date_delta, which converts them to date strings).
DEFAULT_START_DATE = datetime.date(1900, 1, 1)

# Parent expression types in which a boolean is rendered as a BIT literal (1/0)
# rather than as a comparison like (1 = 1) — used by the generator's boolean_sql.
BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias}

# Query-level OPTION(...) hints the parser recognizes, mapping each option name
# to the follow-up keywords it accepts (empty tuple = no argument keywords).
# Unsupported options:
# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] )
# - TABLE HINT
OPTIONS: parser.OPTIONS_TYPE = {
    **dict.fromkeys(
        (
            "DISABLE_OPTIMIZED_PLAN_FORCING",
            "FAST",
            "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX",
            "LABEL",
            "MAXDOP",
            "MAXRECURSION",
            "MAX_GRANT_PERCENT",
            "MIN_GRANT_PERCENT",
            "NO_PERFORMANCE_SPOOL",
            "QUERYTRACEON",
            "RECOMPILE",
        ),
        tuple(),
    ),
    "CONCAT": ("UNION",),
    "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"),
    "EXPAND": ("VIEWS",),
    "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"),
    "HASH": ("GROUP", "JOIN", "UNION"),
    "KEEP": ("PLAN",),
    "KEEPFIXED": ("PLAN",),
    "LOOP": ("JOIN",),
    "MERGE": ("JOIN", "UNION"),
    "OPTIMIZE": (("FOR", "UNKNOWN"),),
    "ORDER": ("GROUP",),
    "PARAMETERIZATION": ("FORCED", "SIMPLE"),
    "ROBUST": ("PLAN",),
    "USE": ("PLAN",),
}

# Options whose value must be preceded by '=' when generated back out
# (see the generator's queryoption_sql).
OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL")


def _build_formatted_time(
    exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None
) -> t.Callable[[t.List], E]:
    """Return a builder for two-argument part/date functions (DATEPART, DATENAME).

    The builder casts the second argument to DATETIME and translates the first
    argument (a T-SQL date-part name) into a strftime-style format string via
    TIME_MAPPING, optionally overlaid with FULL_FORMAT_TIME_MAPPING so that
    name-producing parts (weekday/month) map to %A/%B.
    """

    def _builder(args: t.List) -> E:
        assert len(args) == 2

        return exp_class(
            this=exp.cast(args[1], exp.DataType.Type.DATETIME),
            format=exp.Literal.string(
                format_time(
                    args[0].name.lower(),
                    (
                        {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING}
                        if full_format_mapping
                        else TSQL.TIME_MAPPING
                    ),
                )
            ),
        )

    return _builder


def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr:
    """Parse T-SQL FORMAT(value, format[, culture]) into NumberToStr or TimeToStr.

    The format is considered numeric when it is a known transpile-safe numeric
    format ("N"/"C") or contains no date/time tokens (DATE_FMT_RE); otherwise it
    is translated to a strftime-style string and treated as a time format.
    """
    this = seq_get(args, 0)
    fmt = seq_get(args, 1)
    culture = seq_get(args, 2)

    number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name))

    if number_fmt:
        return exp.NumberToStr(this=this, format=fmt, culture=culture)

    if fmt:
        # Single-character formats are .NET "standard" formats with their own
        # mapping; longer ones are translated token-by-token via TIME_MAPPING.
        fmt = exp.Literal.string(
            format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING)
            if len(fmt.name) == 1
            else format_time(fmt.name, TSQL.TIME_MAPPING)
        )

    return exp.TimeToStr(this=this, format=fmt, culture=culture)


def _build_eomonth(args: t.List) -> exp.LastDay:
    """Parse EOMONTH(date[, month_lag]) as LastDay(DateAdd(date, month_lag, month))."""
    date = exp.TsOrDsToDate(this=seq_get(args, 0))
    month_lag = seq_get(args, 1)

    if month_lag is None:
        this: exp.Expression = date
    else:
        unit = DATE_DELTA_INTERVAL.get("month")
        this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit))

    return exp.LastDay(this=this)


def _build_hashbytes(args: t.List) -> exp.Expression:
    """Parse HASHBYTES('<algorithm>', data) into a dedicated hash expression.

    Known algorithms (MD5, SHA/SHA1, SHA2_256, SHA2_512) become MD5/SHA/SHA2
    nodes; anything else is preserved as an anonymous HASHBYTES call.
    """
    kind, data = args
    kind = kind.name.upper() if kind.is_string else ""

    if kind == "MD5":
        # NOTE(review): this pop looks redundant — we return immediately and
        # `args` is not reused on this path; same on the SHA/SHA1 branch.
        args.pop(0)
        return exp.MD5(this=data)
    if kind in ("SHA", "SHA1"):
        args.pop(0)
        return exp.SHA(this=data)
    if kind == "SHA2_256":
        return exp.SHA2(this=data, length=exp.Literal.number(256))
    if kind == "SHA2_512":
        return exp.SHA2(this=data, length=exp.Literal.number(512))

    return exp.func("HASHBYTES", *args)


# Date parts that must be generated via DATEPART rather than FORMAT
# (see _format_sql).
DATEPART_ONLY_FORMATS = {"DW", "WK", "HOUR", "QUARTER"}


def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str:
    """Generate T-SQL FORMAT(...) for NumberToStr/TimeToStr.

    Time formats are mapped back through INVERSE_TIME_MAPPING; parts that
    FORMAT cannot express (DATEPART_ONLY_FORMATS) are emitted as DATEPART
    calls instead.
    """
    fmt = expression.args["format"]

    if not isinstance(expression, exp.NumberToStr):
        if fmt.is_string:
            mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING)

            name = (mapped_fmt or "").upper()
            if name in DATEPART_ONLY_FORMATS:
                return self.func("DATEPART", name, expression.this)

            fmt_sql = self.sql(exp.Literal.string(mapped_fmt))
        else:
            fmt_sql = self.format_time(expression) or self.sql(fmt)
    else:
        fmt_sql = self.sql(fmt)

    return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture"))


def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str:
    """Generate STRING_AGG(...) [WITHIN GROUP (ORDER BY ...)] for GroupConcat.

    DISTINCT is not supported by T-SQL's STRING_AGG and is dropped with a
    warning; an ORDER BY under the aggregate becomes a WITHIN GROUP clause.
    """
    this = expression.this
    distinct = expression.find(exp.Distinct)
    if distinct:
        # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression
        self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.")
        this = distinct.pop().expressions[0]

    order = ""
    if isinstance(expression.this, exp.Order):
        if expression.this.this:
            this = expression.this.this.pop()
        # Order has a leading space
        order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})"

    separator = expression.args.get("separator") or exp.Literal.string(",")
    return f"STRING_AGG({self.format_args(this, separator)}){order}"

def _build_date_delta(
    exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None
) -> t.Callable[[t.List], E]:
    """Return a builder for DATEDIFF-style functions: (unit, start, end).

    The unit abbreviation is normalized through ``unit_mapping`` when given.
    An integer literal start date is interpreted as a day offset from
    DEFAULT_START_DATE (1900-01-01) and replaced with the resulting date
    string; float literals are passed through unchanged (see note below).
    """

    def _builder(args: t.List) -> E:
        unit = seq_get(args, 0)
        if unit and unit_mapping:
            unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name))

        start_date = seq_get(args, 1)
        if start_date and start_date.is_number:
            # Numeric types are valid DATETIME values
            if start_date.is_int:
                adds = DEFAULT_START_DATE + datetime.timedelta(days=int(start_date.this))
                start_date = exp.Literal.string(adds.strftime("%F"))
            else:
                # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs.
                # This is not a problem when generating T-SQL code, it is when transpiling to other dialects.
                return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit)

        return exp_class(
            this=exp.TimeStrToTime(this=seq_get(args, 2)),
            expression=exp.TimeStrToTime(this=start_date),
            unit=unit,
        )

    return _builder


def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        # Imported lazily to avoid a module-level dependency on the optimizer.
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression


# Positional args: year, month, day, hour, minute, seconds, milliseconds.
# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts:
    return exp.TimestampFromParts(
        year=seq_get(args, 0),
        month=seq_get(args, 1),
        day=seq_get(args, 2),
        hour=seq_get(args, 3),
        min=seq_get(args, 4),
        sec=seq_get(args, 5),
        milli=seq_get(args, 6),
    )


# Positional args: hour, minute, seconds, fractions, precision.
# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_timefromparts(args: t.List) -> exp.TimeFromParts:
    return exp.TimeFromParts(
        hour=seq_get(args, 0),
        min=seq_get(args, 1),
        sec=seq_get(args, 2),
        fractions=seq_get(args, 3),
        precision=seq_get(args, 4),
    )


def _build_with_arg_as_text(
    klass: t.Type[exp.Expression],
) -> t.Callable[[t.List[exp.Expression]], exp.Expression]:
    """Return a builder for functions whose first argument must be text.

    A non-string first argument is wrapped in a CAST to TEXT before building
    ``klass`` — used for LEN/LEFT/RIGHT, which T-SQL applies to strings.
    """

    def _parse(args: t.List[exp.Expression]) -> exp.Expression:
        this = seq_get(args, 0)

        if this and not this.is_string:
            this = exp.cast(this, exp.DataType.Type.TEXT)

        expression = seq_get(args, 1)
        kwargs = {"this": this}

        if expression:
            kwargs["expression"] = expression

        return klass(**kwargs)

    return _parse


# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16
def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous:
    # PARSENAME(...)
will be stored into exp.SplitPart if: 330 # - All args are literals 331 # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL) 332 if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args): 333 this = args[0] 334 part_index = args[1] 335 split_count = len(this.name.split(".")) 336 if split_count <= 4: 337 return exp.SplitPart( 338 this=this, 339 delimiter=exp.Literal.string("."), 340 part_index=exp.Literal.number(split_count + 1 - part_index.to_py()), 341 ) 342 343 return exp.Anonymous(this="PARSENAME", expressions=args) 344 345 346def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract: 347 if len(args) == 1: 348 # The default value for path is '$'. As a result, if you don't provide a 349 # value for path, JSON_QUERY returns the input expression. 350 args.append(exp.Literal.string("$")) 351 352 return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect) 353 354 355def _json_extract_sql( 356 self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar 357) -> str: 358 json_query = self.func("JSON_QUERY", expression.this, expression.expression) 359 json_value = self.func("JSON_VALUE", expression.this, expression.expression) 360 return self.func("ISNULL", json_query, json_value) 361 362 363def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime): 364 sql = timestrtotime_sql(self, expression) 365 if expression.args.get("zone"): 366 # If there is a timezone, produce an expression like: 367 # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC' 368 # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information 369 return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC"))) 370 return sql 371 372 373class TSQL(Dialect): 374 SUPPORTS_SEMI_ANTI_JOIN = False 375 LOG_BASE_FIRST = False 376 TYPED_DIVISION = True 377 CONCAT_COALESCE = True 378 NORMALIZATION_STRATEGY = 
NormalizationStrategy.CASE_INSENSITIVE 379 380 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 381 382 TIME_MAPPING = { 383 "year": "%Y", 384 "dayofyear": "%j", 385 "day": "%d", 386 "dy": "%d", 387 "y": "%Y", 388 "week": "%W", 389 "ww": "%W", 390 "wk": "%W", 391 "hour": "%h", 392 "hh": "%I", 393 "minute": "%M", 394 "mi": "%M", 395 "n": "%M", 396 "second": "%S", 397 "ss": "%S", 398 "s": "%-S", 399 "millisecond": "%f", 400 "ms": "%f", 401 "weekday": "%w", 402 "dw": "%w", 403 "month": "%m", 404 "mm": "%M", 405 "m": "%-M", 406 "Y": "%Y", 407 "YYYY": "%Y", 408 "YY": "%y", 409 "MMMM": "%B", 410 "MMM": "%b", 411 "MM": "%m", 412 "M": "%-m", 413 "dddd": "%A", 414 "dd": "%d", 415 "d": "%-d", 416 "HH": "%H", 417 "H": "%-H", 418 "h": "%-I", 419 "ffffff": "%f", 420 "yyyy": "%Y", 421 "yy": "%y", 422 } 423 424 CONVERT_FORMAT_MAPPING = { 425 "0": "%b %d %Y %-I:%M%p", 426 "1": "%m/%d/%y", 427 "2": "%y.%m.%d", 428 "3": "%d/%m/%y", 429 "4": "%d.%m.%y", 430 "5": "%d-%m-%y", 431 "6": "%d %b %y", 432 "7": "%b %d, %y", 433 "8": "%H:%M:%S", 434 "9": "%b %d %Y %-I:%M:%S:%f%p", 435 "10": "mm-dd-yy", 436 "11": "yy/mm/dd", 437 "12": "yymmdd", 438 "13": "%d %b %Y %H:%M:ss:%f", 439 "14": "%H:%M:%S:%f", 440 "20": "%Y-%m-%d %H:%M:%S", 441 "21": "%Y-%m-%d %H:%M:%S.%f", 442 "22": "%m/%d/%y %-I:%M:%S %p", 443 "23": "%Y-%m-%d", 444 "24": "%H:%M:%S", 445 "25": "%Y-%m-%d %H:%M:%S.%f", 446 "100": "%b %d %Y %-I:%M%p", 447 "101": "%m/%d/%Y", 448 "102": "%Y.%m.%d", 449 "103": "%d/%m/%Y", 450 "104": "%d.%m.%Y", 451 "105": "%d-%m-%Y", 452 "106": "%d %b %Y", 453 "107": "%b %d, %Y", 454 "108": "%H:%M:%S", 455 "109": "%b %d %Y %-I:%M:%S:%f%p", 456 "110": "%m-%d-%Y", 457 "111": "%Y/%m/%d", 458 "112": "%Y%m%d", 459 "113": "%d %b %Y %H:%M:%S:%f", 460 "114": "%H:%M:%S:%f", 461 "120": "%Y-%m-%d %H:%M:%S", 462 "121": "%Y-%m-%d %H:%M:%S.%f", 463 } 464 465 FORMAT_TIME_MAPPING = { 466 "y": "%B %Y", 467 "d": "%m/%d/%Y", 468 "H": "%-H", 469 "h": "%-I", 470 "s": "%Y-%m-%d %H:%M:%S", 471 "D": "%A,%B,%Y", 472 "f": "%A,%B,%Y %-I:%M 
%p", 473 "F": "%A,%B,%Y %-I:%M:%S %p", 474 "g": "%m/%d/%Y %-I:%M %p", 475 "G": "%m/%d/%Y %-I:%M:%S %p", 476 "M": "%B %-d", 477 "m": "%B %-d", 478 "O": "%Y-%m-%dT%H:%M:%S", 479 "u": "%Y-%M-%D %H:%M:%S%z", 480 "U": "%A, %B %D, %Y %H:%M:%S%z", 481 "T": "%-I:%M:%S %p", 482 "t": "%-I:%M", 483 "Y": "%a %Y", 484 } 485 486 class Tokenizer(tokens.Tokenizer): 487 IDENTIFIERS = [("[", "]"), '"'] 488 QUOTES = ["'", '"'] 489 HEX_STRINGS = [("0x", ""), ("0X", "")] 490 VAR_SINGLE_TOKENS = {"@", "$", "#"} 491 492 KEYWORDS = { 493 **tokens.Tokenizer.KEYWORDS, 494 "CLUSTERED INDEX": TokenType.INDEX, 495 "DATETIME2": TokenType.DATETIME, 496 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 497 "DECLARE": TokenType.DECLARE, 498 "EXEC": TokenType.COMMAND, 499 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 500 "IMAGE": TokenType.IMAGE, 501 "MONEY": TokenType.MONEY, 502 "NONCLUSTERED INDEX": TokenType.INDEX, 503 "NTEXT": TokenType.TEXT, 504 "OPTION": TokenType.OPTION, 505 "OUTPUT": TokenType.RETURNING, 506 "PRINT": TokenType.COMMAND, 507 "PROC": TokenType.PROCEDURE, 508 "REAL": TokenType.FLOAT, 509 "ROWVERSION": TokenType.ROWVERSION, 510 "SMALLDATETIME": TokenType.DATETIME, 511 "SMALLMONEY": TokenType.SMALLMONEY, 512 "SQL_VARIANT": TokenType.VARIANT, 513 "SYSTEM_USER": TokenType.CURRENT_USER, 514 "TOP": TokenType.TOP, 515 "TIMESTAMP": TokenType.ROWVERSION, 516 "TINYINT": TokenType.UTINYINT, 517 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 518 "UPDATE STATISTICS": TokenType.COMMAND, 519 "XML": TokenType.XML, 520 } 521 KEYWORDS.pop("/*+") 522 523 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 524 525 class Parser(parser.Parser): 526 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 527 LOG_DEFAULTS_TO_LN = True 528 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 529 STRING_ALIASES = True 530 NO_PAREN_IF_COMMANDS = False 531 532 QUERY_MODIFIER_PARSERS = { 533 **parser.Parser.QUERY_MODIFIER_PARSERS, 534 TokenType.OPTION: lambda self: ("options", self._parse_options()), 535 } 536 537 FUNCTIONS = 
{ 538 **parser.Parser.FUNCTIONS, 539 "CHARINDEX": lambda args: exp.StrPosition( 540 this=seq_get(args, 1), 541 substr=seq_get(args, 0), 542 position=seq_get(args, 2), 543 ), 544 "COUNT": lambda args: exp.Count( 545 this=seq_get(args, 0), expressions=args[1:], big_int=False 546 ), 547 "COUNT_BIG": lambda args: exp.Count( 548 this=seq_get(args, 0), expressions=args[1:], big_int=True 549 ), 550 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 551 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 552 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 553 "DATEPART": _build_formatted_time(exp.TimeToStr), 554 "DATETIMEFROMPARTS": _build_datetimefromparts, 555 "EOMONTH": _build_eomonth, 556 "FORMAT": _build_format, 557 "GETDATE": exp.CurrentTimestamp.from_arg_list, 558 "HASHBYTES": _build_hashbytes, 559 "ISNULL": build_coalesce, 560 "JSON_QUERY": _build_json_query, 561 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 562 "LEN": _build_with_arg_as_text(exp.Length), 563 "LEFT": _build_with_arg_as_text(exp.Left), 564 "RIGHT": _build_with_arg_as_text(exp.Right), 565 "PARSENAME": _build_parsename, 566 "REPLICATE": exp.Repeat.from_arg_list, 567 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 568 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 569 "SUSER_NAME": exp.CurrentUser.from_arg_list, 570 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 571 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 572 "TIMEFROMPARTS": _build_timefromparts, 573 } 574 575 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 576 577 PROCEDURE_OPTIONS = dict.fromkeys( 578 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 579 ) 580 581 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 582 TokenType.TABLE, 583 *parser.Parser.TYPE_TOKENS, 584 } 585 586 STATEMENT_PARSERS = { 587 **parser.Parser.STATEMENT_PARSERS, 588 
TokenType.DECLARE: lambda self: self._parse_declare(), 589 } 590 591 RANGE_PARSERS = { 592 **parser.Parser.RANGE_PARSERS, 593 TokenType.DCOLON: lambda self, this: self.expression( 594 exp.ScopeResolution, 595 this=this, 596 expression=self._parse_function() or self._parse_var(any_token=True), 597 ), 598 } 599 600 NO_PAREN_FUNCTION_PARSERS = { 601 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 602 "NEXT": lambda self: self._parse_next_value_for(), 603 } 604 605 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 606 COLUMN_OPERATORS = { 607 **parser.Parser.COLUMN_OPERATORS, 608 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 609 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 610 else self.expression(exp.ScopeResolution, this=this, expression=to), 611 } 612 613 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 614 # We want to use _parse_types() if the first token after :: is a known type, 615 # otherwise we could parse something like x::varchar(max) into a function 616 if self._match_set(self.TYPE_TOKENS, advance=False): 617 return self._parse_types() 618 619 return self._parse_function() or self._parse_types() 620 621 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 622 if not self._match(TokenType.OPTION): 623 return None 624 625 def _parse_option() -> t.Optional[exp.Expression]: 626 option = self._parse_var_from_options(OPTIONS) 627 if not option: 628 return None 629 630 self._match(TokenType.EQ) 631 return self.expression( 632 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 633 ) 634 635 return self._parse_wrapped_csv(_parse_option) 636 637 def _parse_projections(self) -> t.List[exp.Expression]: 638 """ 639 T-SQL supports the syntax alias = expression in the SELECT's projection list, 640 so we transform all parsed Selects to convert their EQ projections into Aliases. 
641 642 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 643 """ 644 return [ 645 ( 646 exp.alias_(projection.expression, projection.this.this, copy=False) 647 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 648 else projection 649 ) 650 for projection in super()._parse_projections() 651 ] 652 653 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 654 """Applies to SQL Server and Azure SQL Database 655 COMMIT [ { TRAN | TRANSACTION } 656 [ transaction_name | @tran_name_variable ] ] 657 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 658 659 ROLLBACK { TRAN | TRANSACTION } 660 [ transaction_name | @tran_name_variable 661 | savepoint_name | @savepoint_variable ] 662 """ 663 rollback = self._prev.token_type == TokenType.ROLLBACK 664 665 self._match_texts(("TRAN", "TRANSACTION")) 666 this = self._parse_id_var() 667 668 if rollback: 669 return self.expression(exp.Rollback, this=this) 670 671 durability = None 672 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 673 self._match_text_seq("DELAYED_DURABILITY") 674 self._match(TokenType.EQ) 675 676 if self._match_text_seq("OFF"): 677 durability = False 678 else: 679 self._match(TokenType.ON) 680 durability = True 681 682 self._match_r_paren() 683 684 return self.expression(exp.Commit, this=this, durability=durability) 685 686 def _parse_transaction(self) -> exp.Transaction | exp.Command: 687 """Applies to SQL Server and Azure SQL Database 688 BEGIN { TRAN | TRANSACTION } 689 [ { transaction_name | @tran_name_variable } 690 [ WITH MARK [ 'description' ] ] 691 ] 692 """ 693 if self._match_texts(("TRAN", "TRANSACTION")): 694 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 695 if self._match_text_seq("WITH", "MARK"): 696 transaction.set("mark", self._parse_string()) 697 698 return transaction 699 700 return self._parse_as_command(self._prev) 701 702 def _parse_returns(self) -> 
exp.ReturnsProperty: 703 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 704 returns = super()._parse_returns() 705 returns.set("table", table) 706 return returns 707 708 def _parse_convert( 709 self, strict: bool, safe: t.Optional[bool] = None 710 ) -> t.Optional[exp.Expression]: 711 this = self._parse_types() 712 self._match(TokenType.COMMA) 713 args = [this, *self._parse_csv(self._parse_assignment)] 714 convert = exp.Convert.from_arg_list(args) 715 convert.set("safe", safe) 716 convert.set("strict", strict) 717 return convert 718 719 def _parse_user_defined_function( 720 self, kind: t.Optional[TokenType] = None 721 ) -> t.Optional[exp.Expression]: 722 this = super()._parse_user_defined_function(kind=kind) 723 724 if ( 725 kind == TokenType.FUNCTION 726 or isinstance(this, exp.UserDefinedFunction) 727 or self._match(TokenType.ALIAS, advance=False) 728 ): 729 return this 730 731 if not self._match(TokenType.WITH, advance=False): 732 expressions = self._parse_csv(self._parse_function_parameter) 733 else: 734 expressions = None 735 736 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 737 738 def _parse_id_var( 739 self, 740 any_token: bool = True, 741 tokens: t.Optional[t.Collection[TokenType]] = None, 742 ) -> t.Optional[exp.Expression]: 743 is_temporary = self._match(TokenType.HASH) 744 is_global = is_temporary and self._match(TokenType.HASH) 745 746 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 747 if this: 748 if is_global: 749 this.set("global", True) 750 elif is_temporary: 751 this.set("temporary", True) 752 753 return this 754 755 def _parse_create(self) -> exp.Create | exp.Command: 756 create = super()._parse_create() 757 758 if isinstance(create, exp.Create): 759 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 760 if isinstance(table, exp.Table) and table.this.args.get("temporary"): 761 if not create.args.get("properties"): 762 
create.set("properties", exp.Properties(expressions=[])) 763 764 create.args["properties"].append("expressions", exp.TemporaryProperty()) 765 766 return create 767 768 def _parse_if(self) -> t.Optional[exp.Expression]: 769 index = self._index 770 771 if self._match_text_seq("OBJECT_ID"): 772 self._parse_wrapped_csv(self._parse_string) 773 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 774 return self._parse_drop(exists=True) 775 self._retreat(index) 776 777 return super()._parse_if() 778 779 def _parse_unique(self) -> exp.UniqueColumnConstraint: 780 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 781 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 782 else: 783 this = self._parse_schema(self._parse_id_var(any_token=False)) 784 785 return self.expression(exp.UniqueColumnConstraint, this=this) 786 787 def _parse_partition(self) -> t.Optional[exp.Partition]: 788 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 789 return None 790 791 def parse_range(): 792 low = self._parse_bitwise() 793 high = self._parse_bitwise() if self._match_text_seq("TO") else None 794 795 return ( 796 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 797 ) 798 799 partition = self.expression( 800 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 801 ) 802 803 self._match_r_paren() 804 805 return partition 806 807 def _parse_declare(self) -> exp.Declare | exp.Command: 808 index = self._index 809 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 810 811 if not expressions or self._curr: 812 self._retreat(index) 813 return self._parse_as_command(self._prev) 814 815 return self.expression(exp.Declare, expressions=expressions) 816 817 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 818 var = self._parse_id_var() 819 if not var: 820 return None 821 822 value = None 823 self._match(TokenType.ALIAS) 824 if self._match(TokenType.TABLE): 825 data_type = 
self._parse_schema() 826 else: 827 data_type = self._parse_types() 828 if self._match(TokenType.EQ): 829 value = self._parse_bitwise() 830 831 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 832 833 class Generator(generator.Generator): 834 LIMIT_IS_TOP = True 835 QUERY_HINTS = False 836 RETURNING_END = False 837 NVL2_SUPPORTED = False 838 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 839 LIMIT_FETCH = "FETCH" 840 COMPUTED_COLUMN_WITH_TYPE = False 841 CTE_RECURSIVE_KEYWORD_REQUIRED = False 842 ENSURE_BOOLS = True 843 NULL_ORDERING_SUPPORTED = None 844 SUPPORTS_SINGLE_ARG_CONCAT = False 845 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 846 SUPPORTS_SELECT_INTO = True 847 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 848 SUPPORTS_TO_NUMBER = False 849 SET_OP_MODIFIERS = False 850 COPY_PARAMS_EQ_REQUIRED = True 851 PARSE_JSON_NAME = None 852 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 853 854 EXPRESSIONS_WITHOUT_NESTED_CTES = { 855 exp.Create, 856 exp.Delete, 857 exp.Insert, 858 exp.Intersect, 859 exp.Except, 860 exp.Merge, 861 exp.Select, 862 exp.Subquery, 863 exp.Union, 864 exp.Update, 865 } 866 867 SUPPORTED_JSON_PATH_PARTS = { 868 exp.JSONPathKey, 869 exp.JSONPathRoot, 870 exp.JSONPathSubscript, 871 } 872 873 TYPE_MAPPING = { 874 **generator.Generator.TYPE_MAPPING, 875 exp.DataType.Type.BOOLEAN: "BIT", 876 exp.DataType.Type.DECIMAL: "NUMERIC", 877 exp.DataType.Type.DATETIME: "DATETIME2", 878 exp.DataType.Type.DOUBLE: "FLOAT", 879 exp.DataType.Type.INT: "INTEGER", 880 exp.DataType.Type.ROWVERSION: "ROWVERSION", 881 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 882 exp.DataType.Type.TIMESTAMP: "DATETIME2", 883 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 884 exp.DataType.Type.UTINYINT: "TINYINT", 885 exp.DataType.Type.VARIANT: "SQL_VARIANT", 886 } 887 888 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 889 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 890 891 TRANSFORMS = { 892 **generator.Generator.TRANSFORMS, 893 exp.AnyValue: any_value_to_max_sql, 894 
exp.ArrayToString: rename_func("STRING_AGG"), 895 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 896 exp.DateAdd: date_delta_sql("DATEADD"), 897 exp.DateDiff: date_delta_sql("DATEDIFF"), 898 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 899 exp.CurrentDate: rename_func("GETDATE"), 900 exp.CurrentTimestamp: rename_func("GETDATE"), 901 exp.DateStrToDate: datestrtodate_sql, 902 exp.Extract: rename_func("DATEPART"), 903 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 904 exp.GroupConcat: _string_agg_sql, 905 exp.If: rename_func("IIF"), 906 exp.JSONExtract: _json_extract_sql, 907 exp.JSONExtractScalar: _json_extract_sql, 908 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 909 exp.Ln: rename_func("LOG"), 910 exp.Max: max_or_greatest, 911 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 912 exp.Min: min_or_least, 913 exp.NumberToStr: _format_sql, 914 exp.Repeat: rename_func("REPLICATE"), 915 exp.Select: transforms.preprocess( 916 [ 917 transforms.eliminate_distinct_on, 918 transforms.eliminate_semi_and_anti_joins, 919 transforms.eliminate_qualify, 920 transforms.unnest_generate_date_array_using_recursive_cte, 921 ] 922 ), 923 exp.Stddev: rename_func("STDEV"), 924 exp.StrPosition: lambda self, e: self.func( 925 "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position") 926 ), 927 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 928 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 929 exp.SHA2: lambda self, e: self.func( 930 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 931 ), 932 exp.TemporaryProperty: lambda self, e: "", 933 exp.TimeStrToTime: _timestrtotime_sql, 934 exp.TimeToStr: _format_sql, 935 exp.Trim: trim_sql, 936 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 937 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 938 } 939 940 TRANSFORMS.pop(exp.ReturnsProperty) 
941 942 PROPERTIES_LOCATION = { 943 **generator.Generator.PROPERTIES_LOCATION, 944 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 945 } 946 947 def scope_resolution(self, rhs: str, scope_name: str) -> str: 948 return f"{scope_name}::{rhs}" 949 950 def select_sql(self, expression: exp.Select) -> str: 951 if expression.args.get("offset"): 952 if not expression.args.get("order"): 953 # ORDER BY is required in order to use OFFSET in a query, so we use 954 # a noop order by, since we don't really care about the order. 955 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 956 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 957 958 limit = expression.args.get("limit") 959 if isinstance(limit, exp.Limit): 960 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 961 # we replace here because otherwise TOP would be generated in select_sql 962 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 963 964 return super().select_sql(expression) 965 966 def convert_sql(self, expression: exp.Convert) -> str: 967 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 968 return self.func( 969 name, expression.this, expression.expression, expression.args.get("style") 970 ) 971 972 def queryoption_sql(self, expression: exp.QueryOption) -> str: 973 option = self.sql(expression, "this") 974 value = self.sql(expression, "expression") 975 if value: 976 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 977 return f"{option} {optional_equal_sign}{value}" 978 return option 979 980 def lateral_op(self, expression: exp.Lateral) -> str: 981 cross_apply = expression.args.get("cross_apply") 982 if cross_apply is True: 983 return "CROSS APPLY" 984 if cross_apply is False: 985 return "OUTER APPLY" 986 987 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 988 self.unsupported("LATERAL clause is not supported.") 989 return "LATERAL" 990 991 def 
splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 992 this = expression.this 993 split_count = len(this.name.split(".")) 994 delimiter = expression.args.get("delimiter") 995 part_index = expression.args.get("part_index") 996 997 if ( 998 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 999 or (delimiter and delimiter.name != ".") 1000 or not part_index 1001 or split_count > 4 1002 ): 1003 self.unsupported( 1004 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1005 ) 1006 return "" 1007 1008 return self.func( 1009 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1010 ) 1011 1012 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1013 nano = expression.args.get("nano") 1014 if nano is not None: 1015 nano.pop() 1016 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1017 1018 if expression.args.get("fractions") is None: 1019 expression.set("fractions", exp.Literal.number(0)) 1020 if expression.args.get("precision") is None: 1021 expression.set("precision", exp.Literal.number(0)) 1022 1023 return rename_func("TIMEFROMPARTS")(self, expression) 1024 1025 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1026 zone = expression.args.get("zone") 1027 if zone is not None: 1028 zone.pop() 1029 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1030 1031 nano = expression.args.get("nano") 1032 if nano is not None: 1033 nano.pop() 1034 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1035 1036 if expression.args.get("milli") is None: 1037 expression.set("milli", exp.Literal.number(0)) 1038 1039 return rename_func("DATETIMEFROMPARTS")(self, expression) 1040 1041 def setitem_sql(self, expression: exp.SetItem) -> str: 1042 this = expression.this 1043 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1044 # T-SQL does not use 
'=' in SET command, except when the LHS is a variable. 1045 return f"{self.sql(this.left)} {self.sql(this.right)}" 1046 1047 return super().setitem_sql(expression) 1048 1049 def boolean_sql(self, expression: exp.Boolean) -> str: 1050 if type(expression.parent) in BIT_TYPES or isinstance( 1051 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1052 ): 1053 return "1" if expression.this else "0" 1054 1055 return "(1 = 1)" if expression.this else "(1 = 0)" 1056 1057 def is_sql(self, expression: exp.Is) -> str: 1058 if isinstance(expression.expression, exp.Boolean): 1059 return self.binary(expression, "=") 1060 return self.binary(expression, "IS") 1061 1062 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1063 sql = self.sql(expression, "this") 1064 properties = expression.args.get("properties") 1065 1066 if sql[:1] != "#" and any( 1067 isinstance(prop, exp.TemporaryProperty) 1068 for prop in (properties.expressions if properties else []) 1069 ): 1070 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1071 1072 return sql 1073 1074 def create_sql(self, expression: exp.Create) -> str: 1075 kind = expression.kind 1076 exists = expression.args.pop("exists", None) 1077 1078 like_property = expression.find(exp.LikeProperty) 1079 if like_property: 1080 ctas_expression = like_property.this 1081 else: 1082 ctas_expression = expression.expression 1083 1084 if kind == "VIEW": 1085 expression.this.set("catalog", None) 1086 with_ = expression.args.get("with") 1087 if ctas_expression and with_: 1088 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1089 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1090 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1091 ctas_expression.set("with", with_.pop()) 1092 1093 sql = super().create_sql(expression) 1094 1095 table = expression.find(exp.Table) 1096 1097 # Convert CTAS statement to SELECT .. 
INTO .. 1098 if kind == "TABLE" and ctas_expression: 1099 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1100 ctas_expression = ctas_expression.subquery() 1101 1102 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1103 select_into.set("into", exp.Into(this=table)) 1104 1105 if like_property: 1106 select_into.limit(0, copy=False) 1107 1108 sql = self.sql(select_into) 1109 1110 if exists: 1111 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1112 sql_with_ctes = self.prepend_ctes(expression, sql) 1113 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1114 if kind == "SCHEMA": 1115 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1116 elif kind == "TABLE": 1117 assert table 1118 where = exp.and_( 1119 exp.column("table_name").eq(table.name), 1120 exp.column("table_schema").eq(table.db) if table.db else None, 1121 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1122 ) 1123 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1124 elif kind == "INDEX": 1125 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1126 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1127 elif expression.args.get("replace"): 1128 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1129 1130 return self.prepend_ctes(expression, sql) 1131 1132 def count_sql(self, expression: exp.Count) -> str: 1133 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1134 return rename_func(func_name)(self, expression) 1135 1136 def offset_sql(self, expression: exp.Offset) -> str: 1137 return f"{super().offset_sql(expression)} ROWS" 1138 1139 def version_sql(self, expression: exp.Version) -> str: 1140 name = "SYSTEM_TIME" if expression.name == 
"TIMESTAMP" else expression.name 1141 this = f"FOR {name}" 1142 expr = expression.expression 1143 kind = expression.text("kind") 1144 if kind in ("FROM", "BETWEEN"): 1145 args = expr.expressions 1146 sep = "TO" if kind == "FROM" else "AND" 1147 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1148 else: 1149 expr_sql = self.sql(expr) 1150 1151 expr_sql = f" {expr_sql}" if expr_sql else "" 1152 return f"{this} {kind}{expr_sql}" 1153 1154 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1155 table = expression.args.get("table") 1156 table = f"{table} " if table else "" 1157 return f"RETURNS {table}{self.sql(expression, 'this')}" 1158 1159 def returning_sql(self, expression: exp.Returning) -> str: 1160 into = self.sql(expression, "into") 1161 into = self.seg(f"INTO {into}") if into else "" 1162 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1163 1164 def transaction_sql(self, expression: exp.Transaction) -> str: 1165 this = self.sql(expression, "this") 1166 this = f" {this}" if this else "" 1167 mark = self.sql(expression, "mark") 1168 mark = f" WITH MARK {mark}" if mark else "" 1169 return f"BEGIN TRANSACTION{this}{mark}" 1170 1171 def commit_sql(self, expression: exp.Commit) -> str: 1172 this = self.sql(expression, "this") 1173 this = f" {this}" if this else "" 1174 durability = expression.args.get("durability") 1175 durability = ( 1176 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1177 if durability is not None 1178 else "" 1179 ) 1180 return f"COMMIT TRANSACTION{this}{durability}" 1181 1182 def rollback_sql(self, expression: exp.Rollback) -> str: 1183 this = self.sql(expression, "this") 1184 this = f" {this}" if this else "" 1185 return f"ROLLBACK TRANSACTION{this}" 1186 1187 def identifier_sql(self, expression: exp.Identifier) -> str: 1188 identifier = super().identifier_sql(expression) 1189 1190 if expression.args.get("global"): 1191 identifier = 
f"##{identifier}" 1192 elif expression.args.get("temporary"): 1193 identifier = f"#{identifier}" 1194 1195 return identifier 1196 1197 def constraint_sql(self, expression: exp.Constraint) -> str: 1198 this = self.sql(expression, "this") 1199 expressions = self.expressions(expression, flat=True, sep=" ") 1200 return f"CONSTRAINT {this} {expressions}" 1201 1202 def length_sql(self, expression: exp.Length) -> str: 1203 return self._uncast_text(expression, "LEN") 1204 1205 def right_sql(self, expression: exp.Right) -> str: 1206 return self._uncast_text(expression, "RIGHT") 1207 1208 def left_sql(self, expression: exp.Left) -> str: 1209 return self._uncast_text(expression, "LEFT") 1210 1211 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1212 this = expression.this 1213 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1214 this_sql = self.sql(this, "this") 1215 else: 1216 this_sql = self.sql(this) 1217 expression_sql = self.sql(expression, "expression") 1218 return self.func(name, this_sql, expression_sql if expression_sql else None) 1219 1220 def partition_sql(self, expression: exp.Partition) -> str: 1221 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1222 1223 def alter_sql(self, expression: exp.Alter) -> str: 1224 action = seq_get(expression.args.get("actions") or [], 0) 1225 if isinstance(action, exp.AlterRename): 1226 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1227 return super().alter_sql(expression) 1228 1229 def drop_sql(self, expression: exp.Drop) -> str: 1230 if expression.args["kind"] == "VIEW": 1231 expression.this.set("catalog", None) 1232 return super().drop_sql(expression) 1233 1234 def declare_sql(self, expression: exp.Declare) -> str: 1235 return f"DECLARE {self.expressions(expression, flat=True)}" 1236 1237 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1238 variable = self.sql(expression, "this") 1239 default = self.sql(expression, 
"default") 1240 default = f" = {default}" if default else "" 1241 1242 kind = self.sql(expression, "kind") 1243 if isinstance(expression.args.get("kind"), exp.Schema): 1244 kind = f"TABLE {kind}" 1245 1246 return f"{variable} AS {kind}{default}" 1247 1248 def options_modifier(self, expression: exp.Expression) -> str: 1249 options = self.expressions(expression, key="options") 1250 return f" OPTION{self.wrap(options)}" if options else "" 1251 1252 def dpipe_sql(self, expression: exp.DPipe) -> str: 1253 return self.sql( 1254 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1255 )
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.

    Mutates `expression` in place and returns it. Only acts on CTE/Subquery
    nodes whose TableAlias carries no explicit column list; other nodes pass
    through unchanged.
    """
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        # Imported lazily to avoid a circular import at module load time
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        # NOTE: this is deliberately a generator expression — enumerate captures the
        # pre-qualification select list now, while the filter runs only when iterated
        # below, after qualify_outputs() has wrapped the projections.
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression
Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
class TSQL(Dialect):
    """Dialect definition for Microsoft SQL Server / Azure SQL (T-SQL)."""

    # Dialect-level behavior flags
    SUPPORTS_SEMI_ANTI_JOIN = False
    LOG_BASE_FIRST = False
    TYPED_DIVISION = True
    CONCAT_COALESCE = True
    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE

    TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"

    # Maps T-SQL date-part names / format tokens to strftime-style directives
    TIME_MAPPING = {
        "year": "%Y",
        "dayofyear": "%j",
        "day": "%d",
        "dy": "%d",
        "y": "%Y",
        "week": "%W",
        "ww": "%W",
        "wk": "%W",
        "hour": "%h",
        "hh": "%I",
        "minute": "%M",
        "mi": "%M",
        "n": "%M",
        "second": "%S",
        "ss": "%S",
        "s": "%-S",
        "millisecond": "%f",
        "ms": "%f",
        "weekday": "%w",
        "dw": "%w",
        "month": "%m",
        "mm": "%M",
        "m": "%-M",
        "Y": "%Y",
        "YYYY": "%Y",
        "YY": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dddd": "%A",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "h": "%-I",
        "ffffff": "%f",
        "yyyy": "%Y",
        "yy": "%y",
    }

    # Maps CONVERT() numeric style codes to strftime-style formats
    CONVERT_FORMAT_MAPPING = {
        "0": "%b %d %Y %-I:%M%p",
        "1": "%m/%d/%y",
        "2": "%y.%m.%d",
        "3": "%d/%m/%y",
        "4": "%d.%m.%y",
        "5": "%d-%m-%y",
        "6": "%d %b %y",
        "7": "%b %d, %y",
        "8": "%H:%M:%S",
        "9": "%b %d %Y %-I:%M:%S:%f%p",
        "10": "mm-dd-yy",
        "11": "yy/mm/dd",
        "12": "yymmdd",
        "13": "%d %b %Y %H:%M:ss:%f",
        "14": "%H:%M:%S:%f",
        "20": "%Y-%m-%d %H:%M:%S",
        "21": "%Y-%m-%d %H:%M:%S.%f",
        "22": "%m/%d/%y %-I:%M:%S %p",
        "23": "%Y-%m-%d",
        "24": "%H:%M:%S",
        "25": "%Y-%m-%d %H:%M:%S.%f",
        "100": "%b %d %Y %-I:%M%p",
        "101": "%m/%d/%Y",
        "102": "%Y.%m.%d",
        "103": "%d/%m/%Y",
        "104": "%d.%m.%Y",
        "105": "%d-%m-%Y",
        "106": "%d %b %Y",
        "107": "%b %d, %Y",
        "108": "%H:%M:%S",
        "109": "%b %d %Y %-I:%M:%S:%f%p",
        "110": "%m-%d-%Y",
        "111": "%Y/%m/%d",
        "112": "%Y%m%d",
        "113": "%d %b %Y %H:%M:%S:%f",
        "114": "%H:%M:%S:%f",
        "120": "%Y-%m-%d %H:%M:%S",
        "121": "%Y-%m-%d %H:%M:%S.%f",
    }

    # Maps single-letter FORMAT() standard patterns to strftime-style formats
    FORMAT_TIME_MAPPING = {
        "y": "%B %Y",
        "d": "%m/%d/%Y",
        "H": "%-H",
        "h": "%-I",
        "s": "%Y-%m-%d %H:%M:%S",
        "D": "%A,%B,%Y",
        "f": "%A,%B,%Y %-I:%M %p",
        "F": "%A,%B,%Y %-I:%M:%S %p",
        "g": "%m/%d/%Y %-I:%M %p",
        "G": "%m/%d/%Y %-I:%M:%S %p",
        "M": "%B %-d",
        "m": "%B %-d",
        "O": "%Y-%m-%dT%H:%M:%S",
        "u": "%Y-%M-%D %H:%M:%S%z",
        "U": "%A, %B %D, %Y %H:%M:%S%z",
        "T": "%-I:%M:%S %p",
        "t": "%-I:%M",
        "Y": "%a %Y",
    }

    class Tokenizer(tokens.Tokenizer):
        # T-SQL quotes identifiers with [brackets] or double quotes
        IDENTIFIERS = [("[", "]"), '"']
        QUOTES = ["'", '"']
        HEX_STRINGS = [("0x", ""), ("0X", "")]
        VAR_SINGLE_TOKENS = {"@", "$", "#"}

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "CLUSTERED INDEX": TokenType.INDEX,
            "DATETIME2": TokenType.DATETIME,
            "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
            "DECLARE": TokenType.DECLARE,
            "EXEC": TokenType.COMMAND,
            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
            "IMAGE": TokenType.IMAGE,
            "MONEY": TokenType.MONEY,
            "NONCLUSTERED INDEX": TokenType.INDEX,
            "NTEXT": TokenType.TEXT,
            "OPTION": TokenType.OPTION,
            "OUTPUT": TokenType.RETURNING,
            "PRINT": TokenType.COMMAND,
            "PROC": TokenType.PROCEDURE,
            "REAL": TokenType.FLOAT,
            "ROWVERSION": TokenType.ROWVERSION,
            "SMALLDATETIME": TokenType.DATETIME,
            "SMALLMONEY": TokenType.SMALLMONEY,
            "SQL_VARIANT": TokenType.VARIANT,
            "SYSTEM_USER": TokenType.CURRENT_USER,
            "TOP": TokenType.TOP,
            "TIMESTAMP": TokenType.ROWVERSION,
            "TINYINT": TokenType.UTINYINT,
            "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER,
            "UPDATE STATISTICS": TokenType.COMMAND,
            "XML": TokenType.XML,
        }
        # T-SQL has no /*+ hint comments
        KEYWORDS.pop("/*+")

        COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}

    class Parser(parser.Parser):
        SET_REQUIRES_ASSIGNMENT_DELIMITER = False
        LOG_DEFAULTS_TO_LN = True
        ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
        STRING_ALIASES = True
        NO_PAREN_IF_COMMANDS = False

        QUERY_MODIFIER_PARSERS = {
            **parser.Parser.QUERY_MODIFIER_PARSERS,
            TokenType.OPTION: lambda self: ("options", self._parse_options()),
        }

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            # CHARINDEX(substr, string[, start]) -> StrPosition(string, substr, start)
            "CHARINDEX": lambda args: exp.StrPosition(
                this=seq_get(args, 1),
                substr=seq_get(args, 0),
                position=seq_get(args, 2),
            ),
            "COUNT": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=False
            ),
            "COUNT_BIG": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=True
            ),
            "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
            "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
            "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
            "DATEPART": _build_formatted_time(exp.TimeToStr),
            "DATETIMEFROMPARTS": _build_datetimefromparts,
            "EOMONTH": _build_eomonth,
            "FORMAT": _build_format,
            "GETDATE": exp.CurrentTimestamp.from_arg_list,
            "HASHBYTES": _build_hashbytes,
            "ISNULL": build_coalesce,
            "JSON_QUERY": _build_json_query,
            "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
            "LEN": _build_with_arg_as_text(exp.Length),
            "LEFT": _build_with_arg_as_text(exp.Left),
            "RIGHT": _build_with_arg_as_text(exp.Right),
            "PARSENAME": _build_parsename,
            "REPLICATE": exp.Repeat.from_arg_list,
            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
            "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
            "SUSER_NAME": exp.CurrentUser.from_arg_list,
            "SUSER_SNAME": exp.CurrentUser.from_arg_list,
            "SYSTEM_USER": exp.CurrentUser.from_arg_list,
            "TIMEFROMPARTS": _build_timefromparts,
        }

        JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

        PROCEDURE_OPTIONS = dict.fromkeys(
            ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple()
        )

        RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
            TokenType.TABLE,
            *parser.Parser.TYPE_TOKENS,
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,
            TokenType.DECLARE: lambda self: self._parse_declare(),
        }

        RANGE_PARSERS = {
            **parser.Parser.RANGE_PARSERS,
            TokenType.DCOLON: lambda self, this: self.expression(
                exp.ScopeResolution,
                this=this,
                expression=self._parse_function() or self._parse_var(any_token=True),
            ),
        }

        NO_PAREN_FUNCTION_PARSERS = {
            **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
            "NEXT": lambda self: self._parse_next_value_for(),
        }

        # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
        COLUMN_OPERATORS = {
            **parser.Parser.COLUMN_OPERATORS,
            TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
            if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
            else self.expression(exp.ScopeResolution, this=this, expression=to),
        }

        def _parse_dcolon(self) -> t.Optional[exp.Expression]:
            # We want to use _parse_types() if the first token after :: is a known type,
            # otherwise we could parse something like x::varchar(max) into a function
            if self._match_set(self.TYPE_TOKENS, advance=False):
                return self._parse_types()

            return self._parse_function() or self._parse_types()

        def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
            """Parse the trailing OPTION(<hint> [= value], ...) clause, if present."""
            if not self._match(TokenType.OPTION):
                return None

            def _parse_option() -> t.Optional[exp.Expression]:
                option = self._parse_var_from_options(OPTIONS)
                if not option:
                    return None

                self._match(TokenType.EQ)
                return self.expression(
                    exp.QueryOption, this=option, expression=self._parse_primary_or_var()
                )

            return self._parse_wrapped_csv(_parse_option)

        def _parse_projections(self) -> t.List[exp.Expression]:
            """
            T-SQL supports the syntax alias = expression in the SELECT's projection list,
            so we transform all parsed Selects to convert their EQ projections into Aliases.

            See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
            """
            return [
                (
                    exp.alias_(projection.expression, projection.this.this, copy=False)
                    if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                    else projection
                )
                for projection in super()._parse_projections()
            ]

        def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
            """Applies to SQL Server and Azure SQL Database
            COMMIT [ { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable ] ]
                [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

            ROLLBACK { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable
                | savepoint_name | @savepoint_variable ]
            """
            rollback = self._prev.token_type == TokenType.ROLLBACK

            self._match_texts(("TRAN", "TRANSACTION"))
            this = self._parse_id_var()

            if rollback:
                return self.expression(exp.Rollback, this=this)

            durability = None
            if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
                self._match_text_seq("DELAYED_DURABILITY")
                self._match(TokenType.EQ)

                if self._match_text_seq("OFF"):
                    durability = False
                else:
                    self._match(TokenType.ON)
                    durability = True

                self._match_r_paren()

            return self.expression(exp.Commit, this=this, durability=durability)

        def _parse_transaction(self) -> exp.Transaction | exp.Command:
            """Applies to SQL Server and Azure SQL Database
            BEGIN { TRAN | TRANSACTION }
              [ { transaction_name | @tran_name_variable }
              [ WITH MARK [ 'description' ] ]
              ]
            """
            if self._match_texts(("TRAN", "TRANSACTION")):
                transaction = self.expression(exp.Transaction, this=self._parse_id_var())
                if self._match_text_seq("WITH", "MARK"):
                    transaction.set("mark", self._parse_string())

                return transaction

            return self._parse_as_command(self._prev)

        def _parse_returns(self) -> exp.ReturnsProperty:
            # RETURNS @var TABLE (...) — capture the optional table variable name
            table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
            returns = super()._parse_returns()
            returns.set("table", table)
            return returns

        def _parse_convert(
            self, strict: bool, safe: t.Optional[bool] = None
        ) -> t.Optional[exp.Expression]:
            """Parse CONVERT/TRY_CONVERT(type, expr[, style])."""
            this = self._parse_types()
            self._match(TokenType.COMMA)
            args = [this, *self._parse_csv(self._parse_assignment)]
            convert = exp.Convert.from_arg_list(args)
            convert.set("safe", safe)
            convert.set("strict", strict)
            return convert

        def _parse_user_defined_function(
            self, kind: t.Optional[TokenType] = None
        ) -> t.Optional[exp.Expression]:
            this = super()._parse_user_defined_function(kind=kind)

            if (
                kind == TokenType.FUNCTION
                or isinstance(this, exp.UserDefinedFunction)
                or self._match(TokenType.ALIAS, advance=False)
            ):
                return this

            # Procedure parameters may appear without surrounding parentheses
            if not self._match(TokenType.WITH, advance=False):
                expressions = self._parse_csv(self._parse_function_parameter)
            else:
                expressions = None

            return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

        def _parse_id_var(
            self,
            any_token: bool = True,
            tokens: t.Optional[t.Collection[TokenType]] = None,
        ) -> t.Optional[exp.Expression]:
            # '#name' marks a temporary object, '##name' a global temporary one
            is_temporary = self._match(TokenType.HASH)
            is_global = is_temporary and self._match(TokenType.HASH)

            this = super()._parse_id_var(any_token=any_token, tokens=tokens)
            if this:
                if is_global:
                    this.set("global", True)
                elif is_temporary:
                    this.set("temporary", True)

            return this

        def _parse_create(self) -> exp.Create | exp.Command:
            create = super()._parse_create()

            if isinstance(create, exp.Create):
                table = create.this.this if isinstance(create.this, exp.Schema) else create.this
                if isinstance(table, exp.Table) and table.this.args.get("temporary"):
                    # Surface the '#'-implied temporariness as an explicit property
                    if not create.args.get("properties"):
                        create.set("properties", exp.Properties(expressions=[]))

                    create.args["properties"].append("expressions", exp.TemporaryProperty())

            return create

        def _parse_if(self) -> t.Optional[exp.Expression]:
            index = self._index

            # IF OBJECT_ID('name') IS NOT NULL DROP ... -> DROP ... IF EXISTS
            if self._match_text_seq("OBJECT_ID"):
                self._parse_wrapped_csv(self._parse_string)
                if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                    return self._parse_drop(exists=True)
                self._retreat(index)

            return super()._parse_if()

        def _parse_unique(self) -> exp.UniqueColumnConstraint:
            if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
                this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
            else:
                this = self._parse_schema(self._parse_id_var(any_token=False))

            return self.expression(exp.UniqueColumnConstraint, this=this)

        def _parse_partition(self) -> t.Optional[exp.Partition]:
            """Parse the WITH (PARTITIONS(<n> [TO <m>], ...)) table option."""
            if not self._match_text_seq("WITH", "(", "PARTITIONS"):
                return None

            def parse_range():
                low = self._parse_bitwise()
                high = self._parse_bitwise() if self._match_text_seq("TO") else None

                return (
                    self.expression(exp.PartitionRange, this=low, expression=high) if high else low
                )

            partition = self.expression(
                exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
            )

            self._match_r_paren()

            return partition

        def _parse_declare(self) -> exp.Declare | exp.Command:
            index = self._index
            expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem))

            # Fall back to an opaque command when the declaration can't be fully parsed
            if not expressions or self._curr:
                self._retreat(index)
                return self._parse_as_command(self._prev)

            return self.expression(exp.Declare, expressions=expressions)

        def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
            var = self._parse_id_var()
            if not var:
                return None

            value = None
            self._match(TokenType.ALIAS)
            if self._match(TokenType.TABLE):
                # Table variable: DECLARE @t TABLE (<schema>)
                data_type = self._parse_schema()
            else:
                data_type = self._parse_types()
                if self._match(TokenType.EQ):
                    value = self._parse_bitwise()

            return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)

    class Generator(generator.Generator):
        LIMIT_IS_TOP = True
        QUERY_HINTS = False
        RETURNING_END = False
        NVL2_SUPPORTED = False
        ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
        LIMIT_FETCH = "FETCH"
        COMPUTED_COLUMN_WITH_TYPE = False
        CTE_RECURSIVE_KEYWORD_REQUIRED = False
        ENSURE_BOOLS = True
        NULL_ORDERING_SUPPORTED = None
        SUPPORTS_SINGLE_ARG_CONCAT = False
        TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
        SUPPORTS_SELECT_INTO = True
        JSON_PATH_BRACKETED_KEY_SUPPORTED = False
        SUPPORTS_TO_NUMBER = False
        SET_OP_MODIFIERS = False
        COPY_PARAMS_EQ_REQUIRED = True
        PARSE_JSON_NAME = None
        EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False

        EXPRESSIONS_WITHOUT_NESTED_CTES = {
            exp.Create,
            exp.Delete,
            exp.Insert,
            exp.Intersect,
            exp.Except,
            exp.Merge,
            exp.Select,
            exp.Subquery,
            exp.Union,
            exp.Update,
        }

        SUPPORTED_JSON_PATH_PARTS = {
            exp.JSONPathKey,
            exp.JSONPathRoot,
            exp.JSONPathSubscript,
        }

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.BOOLEAN: "BIT",
            exp.DataType.Type.DECIMAL: "NUMERIC",
            exp.DataType.Type.DATETIME: "DATETIME2",
            exp.DataType.Type.DOUBLE: "FLOAT",
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.ROWVERSION: "ROWVERSION",
            exp.DataType.Type.TEXT: "VARCHAR(MAX)",
            exp.DataType.Type.TIMESTAMP: "DATETIME2",
            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
            exp.DataType.Type.UTINYINT: "TINYINT",
            exp.DataType.Type.VARIANT: "SQL_VARIANT",
        }

        # NCHAR / NVARCHAR exist natively in T-SQL; use the default rendering
        TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
        TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.AnyValue: any_value_to_max_sql,
            exp.ArrayToString: rename_func("STRING_AGG"),
            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
            exp.DateAdd: date_delta_sql("DATEADD"),
            exp.DateDiff: date_delta_sql("DATEDIFF"),
            exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
            exp.CurrentDate: rename_func("GETDATE"),
            exp.CurrentTimestamp: rename_func("GETDATE"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.Extract: rename_func("DATEPART"),
            exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
            exp.GroupConcat: _string_agg_sql,
            exp.If: rename_func("IIF"),
            exp.JSONExtract: _json_extract_sql,
            exp.JSONExtractScalar: _json_extract_sql,
            exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
            exp.Ln: rename_func("LOG"),
            exp.Max: max_or_greatest,
            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
            exp.Min: min_or_least,
            exp.NumberToStr: _format_sql,
            exp.Repeat: rename_func("REPLICATE"),
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_distinct_on,
                    transforms.eliminate_semi_and_anti_joins,
                    transforms.eliminate_qualify,
                    transforms.unnest_generate_date_array_using_recursive_cte,
                ]
            ),
            exp.Stddev: rename_func("STDEV"),
            exp.StrPosition: lambda self, e: self.func(
                "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position")
            ),
            exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
            exp.SHA2: lambda self, e: self.func(
                "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
            ),
            exp.TemporaryProperty: lambda self, e: "",
            exp.TimeStrToTime: _timestrtotime_sql,
            exp.TimeToStr: _format_sql,
            exp.Trim: trim_sql,
            exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
            exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        }

        # Handled by the custom returnsproperty_sql method below
        TRANSFORMS.pop(exp.ReturnsProperty)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def scope_resolution(self, rhs: str, scope_name: str) -> str:
            """Render scope resolution as `scope::member`."""
            return f"{scope_name}::{rhs}"

        def select_sql(self, expression: exp.Select) -> str:
            if expression.args.get("offset"):
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

                limit = expression.args.get("limit")
                if isinstance(limit, exp.Limit):
                    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                    # we replace here because otherwise TOP would be generated in select_sql
                    limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            """Render CONVERT / TRY_CONVERT with the optional style argument."""
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            """Render a single OPTION(...) hint, adding '=' only where required."""
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            """Render LATERAL as CROSS APPLY / OUTER APPLY where possible."""
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
appropriately 989 self.unsupported("LATERAL clause is not supported.") 990 return "LATERAL" 991 992 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 993 this = expression.this 994 split_count = len(this.name.split(".")) 995 delimiter = expression.args.get("delimiter") 996 part_index = expression.args.get("part_index") 997 998 if ( 999 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1000 or (delimiter and delimiter.name != ".") 1001 or not part_index 1002 or split_count > 4 1003 ): 1004 self.unsupported( 1005 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1006 ) 1007 return "" 1008 1009 return self.func( 1010 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1011 ) 1012 1013 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1014 nano = expression.args.get("nano") 1015 if nano is not None: 1016 nano.pop() 1017 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1018 1019 if expression.args.get("fractions") is None: 1020 expression.set("fractions", exp.Literal.number(0)) 1021 if expression.args.get("precision") is None: 1022 expression.set("precision", exp.Literal.number(0)) 1023 1024 return rename_func("TIMEFROMPARTS")(self, expression) 1025 1026 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1027 zone = expression.args.get("zone") 1028 if zone is not None: 1029 zone.pop() 1030 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1031 1032 nano = expression.args.get("nano") 1033 if nano is not None: 1034 nano.pop() 1035 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1036 1037 if expression.args.get("milli") is None: 1038 expression.set("milli", exp.Literal.number(0)) 1039 1040 return rename_func("DATETIMEFROMPARTS")(self, expression) 1041 1042 def setitem_sql(self, expression: exp.SetItem) -> str: 1043 this = expression.this 
1044 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1045 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1046 return f"{self.sql(this.left)} {self.sql(this.right)}" 1047 1048 return super().setitem_sql(expression) 1049 1050 def boolean_sql(self, expression: exp.Boolean) -> str: 1051 if type(expression.parent) in BIT_TYPES or isinstance( 1052 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1053 ): 1054 return "1" if expression.this else "0" 1055 1056 return "(1 = 1)" if expression.this else "(1 = 0)" 1057 1058 def is_sql(self, expression: exp.Is) -> str: 1059 if isinstance(expression.expression, exp.Boolean): 1060 return self.binary(expression, "=") 1061 return self.binary(expression, "IS") 1062 1063 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1064 sql = self.sql(expression, "this") 1065 properties = expression.args.get("properties") 1066 1067 if sql[:1] != "#" and any( 1068 isinstance(prop, exp.TemporaryProperty) 1069 for prop in (properties.expressions if properties else []) 1070 ): 1071 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1072 1073 return sql 1074 1075 def create_sql(self, expression: exp.Create) -> str: 1076 kind = expression.kind 1077 exists = expression.args.pop("exists", None) 1078 1079 like_property = expression.find(exp.LikeProperty) 1080 if like_property: 1081 ctas_expression = like_property.this 1082 else: 1083 ctas_expression = expression.expression 1084 1085 if kind == "VIEW": 1086 expression.this.set("catalog", None) 1087 with_ = expression.args.get("with") 1088 if ctas_expression and with_: 1089 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1090 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1091 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 
1092 ctas_expression.set("with", with_.pop()) 1093 1094 sql = super().create_sql(expression) 1095 1096 table = expression.find(exp.Table) 1097 1098 # Convert CTAS statement to SELECT .. INTO .. 1099 if kind == "TABLE" and ctas_expression: 1100 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1101 ctas_expression = ctas_expression.subquery() 1102 1103 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1104 select_into.set("into", exp.Into(this=table)) 1105 1106 if like_property: 1107 select_into.limit(0, copy=False) 1108 1109 sql = self.sql(select_into) 1110 1111 if exists: 1112 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1113 sql_with_ctes = self.prepend_ctes(expression, sql) 1114 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1115 if kind == "SCHEMA": 1116 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1117 elif kind == "TABLE": 1118 assert table 1119 where = exp.and_( 1120 exp.column("table_name").eq(table.name), 1121 exp.column("table_schema").eq(table.db) if table.db else None, 1122 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1123 ) 1124 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1125 elif kind == "INDEX": 1126 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1127 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1128 elif expression.args.get("replace"): 1129 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1130 1131 return self.prepend_ctes(expression, sql) 1132 1133 def count_sql(self, expression: exp.Count) -> str: 1134 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1135 return rename_func(func_name)(self, expression) 1136 1137 def offset_sql(self, expression: 
exp.Offset) -> str: 1138 return f"{super().offset_sql(expression)} ROWS" 1139 1140 def version_sql(self, expression: exp.Version) -> str: 1141 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1142 this = f"FOR {name}" 1143 expr = expression.expression 1144 kind = expression.text("kind") 1145 if kind in ("FROM", "BETWEEN"): 1146 args = expr.expressions 1147 sep = "TO" if kind == "FROM" else "AND" 1148 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1149 else: 1150 expr_sql = self.sql(expr) 1151 1152 expr_sql = f" {expr_sql}" if expr_sql else "" 1153 return f"{this} {kind}{expr_sql}" 1154 1155 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1156 table = expression.args.get("table") 1157 table = f"{table} " if table else "" 1158 return f"RETURNS {table}{self.sql(expression, 'this')}" 1159 1160 def returning_sql(self, expression: exp.Returning) -> str: 1161 into = self.sql(expression, "into") 1162 into = self.seg(f"INTO {into}") if into else "" 1163 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1164 1165 def transaction_sql(self, expression: exp.Transaction) -> str: 1166 this = self.sql(expression, "this") 1167 this = f" {this}" if this else "" 1168 mark = self.sql(expression, "mark") 1169 mark = f" WITH MARK {mark}" if mark else "" 1170 return f"BEGIN TRANSACTION{this}{mark}" 1171 1172 def commit_sql(self, expression: exp.Commit) -> str: 1173 this = self.sql(expression, "this") 1174 this = f" {this}" if this else "" 1175 durability = expression.args.get("durability") 1176 durability = ( 1177 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1178 if durability is not None 1179 else "" 1180 ) 1181 return f"COMMIT TRANSACTION{this}{durability}" 1182 1183 def rollback_sql(self, expression: exp.Rollback) -> str: 1184 this = self.sql(expression, "this") 1185 this = f" {this}" if this else "" 1186 return f"ROLLBACK TRANSACTION{this}" 1187 1188 def 
identifier_sql(self, expression: exp.Identifier) -> str: 1189 identifier = super().identifier_sql(expression) 1190 1191 if expression.args.get("global"): 1192 identifier = f"##{identifier}" 1193 elif expression.args.get("temporary"): 1194 identifier = f"#{identifier}" 1195 1196 return identifier 1197 1198 def constraint_sql(self, expression: exp.Constraint) -> str: 1199 this = self.sql(expression, "this") 1200 expressions = self.expressions(expression, flat=True, sep=" ") 1201 return f"CONSTRAINT {this} {expressions}" 1202 1203 def length_sql(self, expression: exp.Length) -> str: 1204 return self._uncast_text(expression, "LEN") 1205 1206 def right_sql(self, expression: exp.Right) -> str: 1207 return self._uncast_text(expression, "RIGHT") 1208 1209 def left_sql(self, expression: exp.Left) -> str: 1210 return self._uncast_text(expression, "LEFT") 1211 1212 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1213 this = expression.this 1214 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1215 this_sql = self.sql(this, "this") 1216 else: 1217 this_sql = self.sql(this) 1218 expression_sql = self.sql(expression, "expression") 1219 return self.func(name, this_sql, expression_sql if expression_sql else None) 1220 1221 def partition_sql(self, expression: exp.Partition) -> str: 1222 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1223 1224 def alter_sql(self, expression: exp.Alter) -> str: 1225 action = seq_get(expression.args.get("actions") or [], 0) 1226 if isinstance(action, exp.AlterRename): 1227 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1228 return super().alter_sql(expression) 1229 1230 def drop_sql(self, expression: exp.Drop) -> str: 1231 if expression.args["kind"] == "VIEW": 1232 expression.this.set("catalog", None) 1233 return super().drop_sql(expression) 1234 1235 def declare_sql(self, expression: exp.Declare) -> str: 1236 return f"DECLARE 
{self.expressions(expression, flat=True)}" 1237 1238 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1239 variable = self.sql(expression, "this") 1240 default = self.sql(expression, "default") 1241 default = f" = {default}" if default else "" 1242 1243 kind = self.sql(expression, "kind") 1244 if isinstance(expression.args.get("kind"), exp.Schema): 1245 kind = f"TABLE {kind}" 1246 1247 return f"{variable} AS {kind}{default}" 1248 1249 def options_modifier(self, expression: exp.Expression) -> str: 1250 options = self.expressions(expression, key="options") 1251 return f" OPTION{self.wrap(options)}" if options else "" 1252 1253 def dpipe_sql(self, expression: exp.DPipe) -> str: 1254 return self.sql( 1255 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1256 )
Whether the base comes first in the `LOG` function.
Possible values: `True`, `False`, `None` (two arguments are not supported by `LOG`).
Whether the behavior of `a / b` depends on the types of `a` and `b`.
`False` means `a / b` is always float division.
`True` means `a / b` is integer division if both `a` and `b` are integers.
A `NULL` arg in `CONCAT` yields `NULL` by default, but in some dialects it yields an empty string.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python `strftime` formats.
Inherited Members
- sqlglot.dialects.dialect.Dialect
- Dialect
- INDEX_OFFSET
- WEEK_OFFSET
- UNNEST_COLUMN_ONLY
- ALIAS_POST_TABLESAMPLE
- TABLESAMPLE_SIZE_IS_PERCENT
- IDENTIFIERS_CAN_START_WITH_DIGIT
- DPIPE_IS_STRING_CONCAT
- STRICT_STRING_CONCAT
- SUPPORTS_USER_DEFINED_TYPES
- COPY_PARAMS_ARE_CSV
- NORMALIZE_FUNCTIONS
- PRESERVE_ORIGINAL_NAMES
- NULL_ORDERING
- SAFE_DIVISION
- HEX_LOWERCASE
- DATE_FORMAT
- DATEINT_FORMAT
- FORMAT_MAPPING
- UNESCAPED_SEQUENCES
- PSEUDOCOLUMNS
- PREFER_CTE_ALIAS_COLUMN
- FORCE_EARLY_ALIAS_REF_EXPANSION
- EXPAND_ALIAS_REFS_EARLY_ONLY_IN_GROUP_BY
- SUPPORTS_ORDER_BY_ALL
- HAS_DISTINCT_ARRAY_CONSTRUCTORS
- SUPPORTS_FIXED_SIZE_ARRAYS
- STRICT_JSON_PATH_SYNTAX
- ON_CONDITION_EMPTY_BEFORE_ERROR
- ARRAY_AGG_INCLUDES_NULLS
- PROMOTE_TO_INFERRED_DATETIME_TYPE
- SUPPORTS_VALUES_DEFAULT
- REGEXP_EXTRACT_DEFAULT_GROUP
- SET_OP_DISTINCT_BY_DEFAULT
- CREATABLE_KIND_MAPPING
- DATE_PART_MAPPING
- TYPE_TO_EXPRESSIONS
- ANNOTATORS
- get_or_raise
- format_time
- settings
- normalize_identifier
- case_sensitive
- can_identify
- quote_identifier
- to_json_path
- parse
- parse_into
- generate
- transpile
- tokenize
- tokenizer
- jsonpath_tokenizer
- parser
- generator
class Tokenizer(tokens.Tokenizer):
    """T-SQL tokenizer: bracketed identifiers, 0x hex literals, @/$/# variable prefixes."""

    IDENTIFIERS = [("[", "]"), '"']
    QUOTES = ["'", '"']
    HEX_STRINGS = [("0x", ""), ("0X", "")]
    VAR_SINGLE_TOKENS = {"@", "$", "#"}

    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
        "CLUSTERED INDEX": TokenType.INDEX,
        "DATETIME2": TokenType.DATETIME,
        "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
        "DECLARE": TokenType.DECLARE,
        "EXEC": TokenType.COMMAND,
        "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
        "IMAGE": TokenType.IMAGE,
        "MONEY": TokenType.MONEY,
        "NONCLUSTERED INDEX": TokenType.INDEX,
        "NTEXT": TokenType.TEXT,
        "OPTION": TokenType.OPTION,
        "OUTPUT": TokenType.RETURNING,
        "PRINT": TokenType.COMMAND,
        "PROC": TokenType.PROCEDURE,
        "REAL": TokenType.FLOAT,
        "ROWVERSION": TokenType.ROWVERSION,
        "SMALLDATETIME": TokenType.DATETIME,
        "SMALLMONEY": TokenType.SMALLMONEY,
        "SQL_VARIANT": TokenType.VARIANT,
        "SYSTEM_USER": TokenType.CURRENT_USER,
        "TOP": TokenType.TOP,
        # T-SQL's legacy TIMESTAMP type is a synonym for ROWVERSION, not a datetime.
        "TIMESTAMP": TokenType.ROWVERSION,
        "TINYINT": TokenType.UTINYINT,
        "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER,
        "UPDATE STATISTICS": TokenType.COMMAND,
        "XML": TokenType.XML,
    }
    # T-SQL has no '/*+ ... */' hint comments.
    KEYWORDS.pop("/*+")

    COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
class Parser(parser.Parser):
    """T-SQL parser: DECLARE statements, OPTION query hints, '::' scope resolution,
    'alias = expr' projections, and T-SQL-specific function signatures."""

    SET_REQUIRES_ASSIGNMENT_DELIMITER = False
    LOG_DEFAULTS_TO_LN = True
    ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
    STRING_ALIASES = True
    NO_PAREN_IF_COMMANDS = False

    QUERY_MODIFIER_PARSERS = {
        **parser.Parser.QUERY_MODIFIER_PARSERS,
        TokenType.OPTION: lambda self: ("options", self._parse_options()),
    }

    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,
        # CHARINDEX(substr, string[, start]) -> StrPosition(string, substr, start)
        "CHARINDEX": lambda args: exp.StrPosition(
            this=seq_get(args, 1),
            substr=seq_get(args, 0),
            position=seq_get(args, 2),
        ),
        "COUNT": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=False
        ),
        "COUNT_BIG": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=True
        ),
        # NOTE(review): DATEADD uses the shared build_date_delta while DATEDIFF uses the
        # module-local _build_date_delta — presumably intentional; confirm upstream.
        "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
        "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
        "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
        "DATEPART": _build_formatted_time(exp.TimeToStr),
        "DATETIMEFROMPARTS": _build_datetimefromparts,
        "EOMONTH": _build_eomonth,
        "FORMAT": _build_format,
        "GETDATE": exp.CurrentTimestamp.from_arg_list,
        "HASHBYTES": _build_hashbytes,
        "ISNULL": build_coalesce,
        "JSON_QUERY": _build_json_query,
        "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
        "LEN": _build_with_arg_as_text(exp.Length),
        "LEFT": _build_with_arg_as_text(exp.Left),
        "RIGHT": _build_with_arg_as_text(exp.Right),
        "PARSENAME": _build_parsename,
        "REPLICATE": exp.Repeat.from_arg_list,
        "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
        "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
        "SUSER_NAME": exp.CurrentUser.from_arg_list,
        "SUSER_SNAME": exp.CurrentUser.from_arg_list,
        "SYSTEM_USER": exp.CurrentUser.from_arg_list,
        "TIMEFROMPARTS": _build_timefromparts,
    }

    JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

    PROCEDURE_OPTIONS = dict.fromkeys(
        ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple()
    )

    RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
        TokenType.TABLE,
        *parser.Parser.TYPE_TOKENS,
    }

    STATEMENT_PARSERS = {
        **parser.Parser.STATEMENT_PARSERS,
        TokenType.DECLARE: lambda self: self._parse_declare(),
    }

    RANGE_PARSERS = {
        **parser.Parser.RANGE_PARSERS,
        TokenType.DCOLON: lambda self, this: self.expression(
            exp.ScopeResolution,
            this=this,
            expression=self._parse_function() or self._parse_var(any_token=True),
        ),
    }

    NO_PAREN_FUNCTION_PARSERS = {
        **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
        "NEXT": lambda self: self._parse_next_value_for(),
    }

    # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
    COLUMN_OPERATORS = {
        **parser.Parser.COLUMN_OPERATORS,
        TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
        if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
        else self.expression(exp.ScopeResolution, this=this, expression=to),
    }

    def _parse_dcolon(self) -> t.Optional[exp.Expression]:
        # We want to use _parse_types() if the first token after :: is a known type,
        # otherwise we could parse something like x::varchar(max) into a function
        if self._match_set(self.TYPE_TOKENS, advance=False):
            return self._parse_types()

        return self._parse_function() or self._parse_types()

    def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
        if not self._match(TokenType.OPTION):
            return None

        def _parse_option() -> t.Optional[exp.Expression]:
            option = self._parse_var_from_options(OPTIONS)
            if not option:
                return None

            self._match(TokenType.EQ)
            return self.expression(
                exp.QueryOption, this=option, expression=self._parse_primary_or_var()
            )

        return self._parse_wrapped_csv(_parse_option)

    def _parse_projections(self) -> t.List[exp.Expression]:
        """
        T-SQL supports the syntax alias = expression in the SELECT's projection list,
        so we transform all parsed Selects to convert their EQ projections into Aliases.

        See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
        """
        return [
            (
                exp.alias_(projection.expression, projection.this.this, copy=False)
                if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                else projection
            )
            for projection in super()._parse_projections()
        ]

    def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
        """Applies to SQL Server and Azure SQL Database
        COMMIT [ { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable ] ]
            [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

        ROLLBACK { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable
            | savepoint_name | @savepoint_variable ]
        """
        rollback = self._prev.token_type == TokenType.ROLLBACK

        self._match_texts(("TRAN", "TRANSACTION"))
        this = self._parse_id_var()

        if rollback:
            return self.expression(exp.Rollback, this=this)

        durability = None
        if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
            self._match_text_seq("DELAYED_DURABILITY")
            self._match(TokenType.EQ)

            if self._match_text_seq("OFF"):
                durability = False
            else:
                self._match(TokenType.ON)
                durability = True

            self._match_r_paren()

        return self.expression(exp.Commit, this=this, durability=durability)

    def _parse_transaction(self) -> exp.Transaction | exp.Command:
        """Applies to SQL Server and Azure SQL Database
        BEGIN { TRAN | TRANSACTION }
        [ { transaction_name | @tran_name_variable }
        [ WITH MARK [ 'description' ] ]
        ]
        """
        if self._match_texts(("TRAN", "TRANSACTION")):
            transaction = self.expression(exp.Transaction, this=self._parse_id_var())
            if self._match_text_seq("WITH", "MARK"):
                transaction.set("mark", self._parse_string())

            return transaction

        return self._parse_as_command(self._prev)

    def _parse_returns(self) -> exp.ReturnsProperty:
        table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
        returns = super()._parse_returns()
        returns.set("table", table)
        return returns

    def _parse_convert(
        self, strict: bool, safe: t.Optional[bool] = None
    ) -> t.Optional[exp.Expression]:
        # CONVERT(type, expr[, style]) — the target type comes first in T-SQL.
        this = self._parse_types()
        self._match(TokenType.COMMA)
        args = [this, *self._parse_csv(self._parse_assignment)]
        convert = exp.Convert.from_arg_list(args)
        convert.set("safe", safe)
        convert.set("strict", strict)
        return convert

    def _parse_user_defined_function(
        self, kind: t.Optional[TokenType] = None
    ) -> t.Optional[exp.Expression]:
        this = super()._parse_user_defined_function(kind=kind)

        if (
            kind == TokenType.FUNCTION
            or isinstance(this, exp.UserDefinedFunction)
            or self._match(TokenType.ALIAS, advance=False)
        ):
            return this

        if not self._match(TokenType.WITH, advance=False):
            expressions = self._parse_csv(self._parse_function_parameter)
        else:
            expressions = None

        return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

    def _parse_id_var(
        self,
        any_token: bool = True,
        tokens: t.Optional[t.Collection[TokenType]] = None,
    ) -> t.Optional[exp.Expression]:
        # '#name' marks a temporary object, '##name' a global temporary one.
        is_temporary = self._match(TokenType.HASH)
        is_global = is_temporary and self._match(TokenType.HASH)

        this = super()._parse_id_var(any_token=any_token, tokens=tokens)
        if this:
            if is_global:
                this.set("global", True)
            elif is_temporary:
                this.set("temporary", True)

        return this

    def _parse_create(self) -> exp.Create | exp.Command:
        create = super()._parse_create()

        if isinstance(create, exp.Create):
            table = create.this.this if isinstance(create.this, exp.Schema) else create.this
            if isinstance(table, exp.Table) and table.this.args.get("temporary"):
                if not create.args.get("properties"):
                    create.set("properties", exp.Properties(expressions=[]))

                create.args["properties"].append("expressions", exp.TemporaryProperty())

        return create

    def _parse_if(self) -> t.Optional[exp.Expression]:
        index = self._index

        # IF OBJECT_ID('x') IS NOT NULL DROP ... is parsed as DROP IF EXISTS.
        if self._match_text_seq("OBJECT_ID"):
            self._parse_wrapped_csv(self._parse_string)
            if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                return self._parse_drop(exists=True)

        self._retreat(index)
        return super()._parse_if()

    def _parse_unique(self) -> exp.UniqueColumnConstraint:
        if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
            this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
        else:
            this = self._parse_schema(self._parse_id_var(any_token=False))

        return self.expression(exp.UniqueColumnConstraint, this=this)

    def _parse_partition(self) -> t.Optional[exp.Partition]:
        if not self._match_text_seq("WITH", "(", "PARTITIONS"):
            return None

        def parse_range():
            low = self._parse_bitwise()
            high = self._parse_bitwise() if self._match_text_seq("TO") else None

            return (
                self.expression(exp.PartitionRange, this=low, expression=high) if high else low
            )

        partition = self.expression(
            exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
        )

        self._match_r_paren()

        return partition

    def _parse_declare(self) -> exp.Declare | exp.Command:
        index = self._index
        expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem))

        # Fall back to a raw command if the DECLARE body could not be fully parsed.
        if not expressions or self._curr:
            self._retreat(index)
            return self._parse_as_command(self._prev)

        return self.expression(exp.Declare, expressions=expressions)

    def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
        var = self._parse_id_var()
        if not var:
            return None

        value = None
        self._match(TokenType.ALIAS)
        if self._match(TokenType.TABLE):
            data_type = self._parse_schema()
        else:
            data_type = self._parse_types()
            if self._match(TokenType.EQ):
                value = self._parse_bitwise()

        return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- INTERVAL_VARS
- ALIAS_TOKENS
- ARRAY_CONSTRUCTORS
- COMMENT_TABLE_ALIAS_TOKENS
- UPDATE_ALIAS_TOKENS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- FUNCTION_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- NULL_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ODBC_DATETIME_LITERALS
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- OPERATION_MODIFIERS
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- SET_OP_MODIFIERS
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- errors
- sql
834 class Generator(generator.Generator): 835 LIMIT_IS_TOP = True 836 QUERY_HINTS = False 837 RETURNING_END = False 838 NVL2_SUPPORTED = False 839 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 840 LIMIT_FETCH = "FETCH" 841 COMPUTED_COLUMN_WITH_TYPE = False 842 CTE_RECURSIVE_KEYWORD_REQUIRED = False 843 ENSURE_BOOLS = True 844 NULL_ORDERING_SUPPORTED = None 845 SUPPORTS_SINGLE_ARG_CONCAT = False 846 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 847 SUPPORTS_SELECT_INTO = True 848 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 849 SUPPORTS_TO_NUMBER = False 850 SET_OP_MODIFIERS = False 851 COPY_PARAMS_EQ_REQUIRED = True 852 PARSE_JSON_NAME = None 853 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 854 855 EXPRESSIONS_WITHOUT_NESTED_CTES = { 856 exp.Create, 857 exp.Delete, 858 exp.Insert, 859 exp.Intersect, 860 exp.Except, 861 exp.Merge, 862 exp.Select, 863 exp.Subquery, 864 exp.Union, 865 exp.Update, 866 } 867 868 SUPPORTED_JSON_PATH_PARTS = { 869 exp.JSONPathKey, 870 exp.JSONPathRoot, 871 exp.JSONPathSubscript, 872 } 873 874 TYPE_MAPPING = { 875 **generator.Generator.TYPE_MAPPING, 876 exp.DataType.Type.BOOLEAN: "BIT", 877 exp.DataType.Type.DECIMAL: "NUMERIC", 878 exp.DataType.Type.DATETIME: "DATETIME2", 879 exp.DataType.Type.DOUBLE: "FLOAT", 880 exp.DataType.Type.INT: "INTEGER", 881 exp.DataType.Type.ROWVERSION: "ROWVERSION", 882 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 883 exp.DataType.Type.TIMESTAMP: "DATETIME2", 884 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 885 exp.DataType.Type.UTINYINT: "TINYINT", 886 exp.DataType.Type.VARIANT: "SQL_VARIANT", 887 } 888 889 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 890 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 891 892 TRANSFORMS = { 893 **generator.Generator.TRANSFORMS, 894 exp.AnyValue: any_value_to_max_sql, 895 exp.ArrayToString: rename_func("STRING_AGG"), 896 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 897 exp.DateAdd: date_delta_sql("DATEADD"), 898 exp.DateDiff: date_delta_sql("DATEDIFF"), 899 exp.CTE: 
transforms.preprocess([qualify_derived_table_outputs]), 900 exp.CurrentDate: rename_func("GETDATE"), 901 exp.CurrentTimestamp: rename_func("GETDATE"), 902 exp.DateStrToDate: datestrtodate_sql, 903 exp.Extract: rename_func("DATEPART"), 904 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 905 exp.GroupConcat: _string_agg_sql, 906 exp.If: rename_func("IIF"), 907 exp.JSONExtract: _json_extract_sql, 908 exp.JSONExtractScalar: _json_extract_sql, 909 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 910 exp.Ln: rename_func("LOG"), 911 exp.Max: max_or_greatest, 912 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 913 exp.Min: min_or_least, 914 exp.NumberToStr: _format_sql, 915 exp.Repeat: rename_func("REPLICATE"), 916 exp.Select: transforms.preprocess( 917 [ 918 transforms.eliminate_distinct_on, 919 transforms.eliminate_semi_and_anti_joins, 920 transforms.eliminate_qualify, 921 transforms.unnest_generate_date_array_using_recursive_cte, 922 ] 923 ), 924 exp.Stddev: rename_func("STDEV"), 925 exp.StrPosition: lambda self, e: self.func( 926 "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position") 927 ), 928 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 929 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 930 exp.SHA2: lambda self, e: self.func( 931 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 932 ), 933 exp.TemporaryProperty: lambda self, e: "", 934 exp.TimeStrToTime: _timestrtotime_sql, 935 exp.TimeToStr: _format_sql, 936 exp.Trim: trim_sql, 937 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 938 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 939 } 940 941 TRANSFORMS.pop(exp.ReturnsProperty) 942 943 PROPERTIES_LOCATION = { 944 **generator.Generator.PROPERTIES_LOCATION, 945 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 946 } 947 948 def scope_resolution(self, rhs: str, scope_name: str) 
-> str: 949 return f"{scope_name}::{rhs}" 950 951 def select_sql(self, expression: exp.Select) -> str: 952 if expression.args.get("offset"): 953 if not expression.args.get("order"): 954 # ORDER BY is required in order to use OFFSET in a query, so we use 955 # a noop order by, since we don't really care about the order. 956 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 957 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 958 959 limit = expression.args.get("limit") 960 if isinstance(limit, exp.Limit): 961 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 962 # we replace here because otherwise TOP would be generated in select_sql 963 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 964 965 return super().select_sql(expression) 966 967 def convert_sql(self, expression: exp.Convert) -> str: 968 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 969 return self.func( 970 name, expression.this, expression.expression, expression.args.get("style") 971 ) 972 973 def queryoption_sql(self, expression: exp.QueryOption) -> str: 974 option = self.sql(expression, "this") 975 value = self.sql(expression, "expression") 976 if value: 977 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 978 return f"{option} {optional_equal_sign}{value}" 979 return option 980 981 def lateral_op(self, expression: exp.Lateral) -> str: 982 cross_apply = expression.args.get("cross_apply") 983 if cross_apply is True: 984 return "CROSS APPLY" 985 if cross_apply is False: 986 return "OUTER APPLY" 987 988 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 989 self.unsupported("LATERAL clause is not supported.") 990 return "LATERAL" 991 992 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 993 this = expression.this 994 split_count = len(this.name.split(".")) 995 delimiter = expression.args.get("delimiter") 996 part_index = 
def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
    """Transpile SPLIT_PART into T-SQL's PARSENAME, when representable."""
    value = expression.this
    delimiter = expression.args.get("delimiter")
    index = expression.args.get("part_index")
    segments = len(value.name.split("."))

    all_literals = all(isinstance(arg, exp.Literal) for arg in (value, delimiter, index))

    # PARSENAME only splits on '.', supports at most four parts, and
    # numbers them from the right instead of the left.
    if (
        not all_literals
        or (delimiter and delimiter.name != ".")
        or not index
        or segments > 4
    ):
        self.unsupported(
            "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
        )
        return ""

    return self.func("PARSENAME", value, exp.Literal.number(segments + 1 - index.to_py()))

def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
    """Render TIMEFROMPARTS, discarding the unsupported nanosecond argument."""
    if (nano := expression.args.get("nano")) is not None:
        nano.pop()
        self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

    # TIMEFROMPARTS requires explicit fractions/precision arguments.
    for arg in ("fractions", "precision"):
        if expression.args.get(arg) is None:
            expression.set(arg, exp.Literal.number(0))

    return rename_func("TIMEFROMPARTS")(self, expression)

def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
    """Render DATETIMEFROMPARTS, discarding unsupported zone/nano arguments."""
    if (zone := expression.args.get("zone")) is not None:
        zone.pop()
        self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

    if (nano := expression.args.get("nano")) is not None:
        nano.pop()
        self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

    # DATETIMEFROMPARTS requires an explicit milliseconds argument.
    if expression.args.get("milli") is None:
        expression.set("milli", exp.Literal.number(0))

    return rename_func("DATETIMEFROMPARTS")(self, expression)

def setitem_sql(self, expression: exp.SetItem) -> str:
    target = expression.this
    # T-SQL does not use '=' in SET command, except when the LHS is a variable.
    if isinstance(target, exp.EQ) and not isinstance(target.left, exp.Parameter):
        return f"{self.sql(target.left)} {self.sql(target.right)}"

    return super().setitem_sql(expression)

def boolean_sql(self, expression: exp.Boolean) -> str:
    # Booleans become BIT literals in comparison/selection contexts and in
    # VALUES rows; elsewhere they must be spelled as trivial predicates.
    as_bit = type(expression.parent) in BIT_TYPES or isinstance(
        expression.find_ancestor(exp.Values, exp.Select), exp.Values
    )
    if as_bit:
        return "1" if expression.this else "0"

    return "(1 = 1)" if expression.this else "(1 = 0)"

def is_sql(self, expression: exp.Is) -> str:
    # IS cannot compare against boolean values in T-SQL, so use '=' there.
    if isinstance(expression.expression, exp.Boolean):
        return self.binary(expression, "=")
    return self.binary(expression, "IS")

def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    name_sql = self.sql(expression, "this")
    props = expression.args.get("properties")
    prop_list = props.expressions if props else []

    # Temporary objects are marked with a leading '#' rather than a keyword.
    if name_sql[:1] != "#" and any(isinstance(prop, exp.TemporaryProperty) for prop in prop_list):
        name_sql = f"[#{name_sql[1:]}" if name_sql.startswith("[") else f"#{name_sql}"

    return name_sql

def create_sql(self, expression: exp.Create) -> str:
    """Generate CREATE, rewriting CTAS to SELECT INTO and emulating IF NOT EXISTS."""
    kind = expression.kind
    exists = expression.args.pop("exists", None)

    like_property = expression.find(exp.LikeProperty)
    ctas = like_property.this if like_property else expression.expression

    if kind == "VIEW":
        expression.this.set("catalog", None)
        with_ = expression.args.get("with")
        if ctas and with_:
            # We've already preprocessed the Create expression to bubble up any nested CTEs,
            # but CREATE VIEW actually requires the WITH clause to come after it so we need
            # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
            ctas.set("with", with_.pop())

    sql = super().create_sql(expression)

    table = expression.find(exp.Table)

    # Convert CTAS statement to SELECT .. INTO ..
    if kind == "TABLE" and ctas:
        if isinstance(ctas, exp.UNWRAPPED_QUERIES):
            ctas = ctas.subquery()

        select_into = exp.select("*").from_(exp.alias_(ctas, "temp", table=True))
        select_into.set("into", exp.Into(this=table))

        if like_property:
            # CREATE .. LIKE only copies the schema, so select zero rows.
            select_into.limit(0, copy=False)

        sql = self.sql(select_into)

    if exists:
        identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
        sql_with_ctes = self.prepend_ctes(expression, sql)
        sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
        if kind == "SCHEMA":
            return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})"""
        elif kind == "TABLE":
            assert table
            where = exp.and_(
                exp.column("table_name").eq(table.name),
                exp.column("table_schema").eq(table.db) if table.db else None,
                exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
            )
            return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})"""
        elif kind == "INDEX":
            index = self.sql(exp.Literal.string(expression.this.text("this")))
            return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
    elif expression.args.get("replace"):
        sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

    return self.prepend_ctes(expression, sql)

def count_sql(self, expression: exp.Count) -> str:
    # COUNT_BIG returns BIGINT instead of INT.
    name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
    return rename_func(name)(self, expression)

def offset_sql(self, expression: exp.Offset) -> str:
    # T-SQL requires the ROWS keyword after an OFFSET clause.
    return f"{super().offset_sql(expression)} ROWS"

def version_sql(self, expression: exp.Version) -> str:
    name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
    kind = expression.text("kind")
    expr = expression.expression

    if kind in ("FROM", "BETWEEN"):
        # FROM .. TO and BETWEEN .. AND take two boundary expressions.
        separator = "TO" if kind == "FROM" else "AND"
        bounds = expr.expressions
        expr_sql = f"{self.sql(seq_get(bounds, 0))} {separator} {self.sql(seq_get(bounds, 1))}"
    else:
        expr_sql = self.sql(expr)

    suffix = f" {expr_sql}" if expr_sql else ""
    return f"FOR {name} {kind}{suffix}"

def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
    table = expression.args.get("table")
    prefix = f"{table} " if table else ""
    return f"RETURNS {prefix}{self.sql(expression, 'this')}"

def returning_sql(self, expression: exp.Returning) -> str:
    # RETURNING maps to T-SQL's OUTPUT clause, optionally with an INTO target.
    into = self.sql(expression, "into")
    into_seg = self.seg(f"INTO {into}") if into else ""
    return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into_seg}"

def transaction_sql(self, expression: exp.Transaction) -> str:
    name = self.sql(expression, "this")
    name = f" {name}" if name else ""
    mark = self.sql(expression, "mark")
    mark = f" WITH MARK {mark}" if mark else ""
    return f"BEGIN TRANSACTION{name}{mark}"

def commit_sql(self, expression: exp.Commit) -> str:
    name = self.sql(expression, "this")
    name = f" {name}" if name else ""

    durability = expression.args.get("durability")
    if durability is None:
        suffix = ""
    else:
        setting = "ON" if durability else "OFF"
        suffix = f" WITH (DELAYED_DURABILITY = {setting})"

    return f"COMMIT TRANSACTION{name}{suffix}"

def rollback_sql(self, expression: exp.Rollback) -> str:
    name = self.sql(expression, "this")
    name = f" {name}" if name else ""
    return f"ROLLBACK TRANSACTION{name}"

def identifier_sql(self, expression: exp.Identifier) -> str:
    rendered = super().identifier_sql(expression)

    # Temp objects are prefixed with '#'; global temp objects with '##'.
    if expression.args.get("global"):
        return f"##{rendered}"
    if expression.args.get("temporary"):
        return f"#{rendered}"

    return rendered

def constraint_sql(self, expression: exp.Constraint) -> str:
    name = self.sql(expression, "this")
    body = self.expressions(expression, flat=True, sep=" ")
    return f"CONSTRAINT {name} {body}"

def length_sql(self, expression: exp.Length) -> str:
    return self._uncast_text(expression, "LEN")

def right_sql(self, expression: exp.Right) -> str:
    return self._uncast_text(expression, "RIGHT")

def left_sql(self, expression: exp.Left) -> str:
    return self._uncast_text(expression, "LEFT")

def _uncast_text(self, expression: exp.Expression, name: str) -> str:
    # LEN/LEFT/RIGHT accept text directly, so drop a redundant CAST(.. AS TEXT).
    this = expression.this
    if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
        this_sql = self.sql(this, "this")
    else:
        this_sql = self.sql(this)

    expression_sql = self.sql(expression, "expression")
    return self.func(name, this_sql, expression_sql or None)

def partition_sql(self, expression: exp.Partition) -> str:
    partitions = self.expressions(expression, flat=True)
    return f"WITH (PARTITIONS({partitions}))"

def alter_sql(self, expression: exp.Alter) -> str:
    # Table renames go through the sp_rename stored procedure.
    first_action = seq_get(expression.args.get("actions") or [], 0)
    if isinstance(first_action, exp.AlterRename):
        return f"EXEC sp_rename '{self.sql(expression.this)}', '{first_action.this.name}'"
    return super().alter_sql(expression)

def drop_sql(self, expression: exp.Drop) -> str:
    # Views cannot be qualified with a catalog in DROP statements.
    if expression.args["kind"] == "VIEW":
        expression.this.set("catalog", None)
    return super().drop_sql(expression)

def declare_sql(self, expression: exp.Declare) -> str:
    return f"DECLARE {self.expressions(expression, flat=True)}"

def declareitem_sql(self, expression: exp.DeclareItem) -> str:
    variable = self.sql(expression, "this")
    default = self.sql(expression, "default")
    default_sql = f" = {default}" if default else ""

    kind_sql = self.sql(expression, "kind")
    # Table variables are declared as 'AS TABLE (<schema>)'.
    if isinstance(expression.args.get("kind"), exp.Schema):
        kind_sql = f"TABLE {kind_sql}"

    return f"{variable} AS {kind_sql}{default_sql}"

def options_modifier(self, expression: exp.Expression) -> str:
    options = self.expressions(expression, key="options")
    return f" OPTION{self.wrap(options)}" if options else ""

def dpipe_sql(self, expression: exp.DPipe) -> str:
    # T-SQL has no '||' operator; fold the concat chain into '+' additions.
    return self.sql(
        reduce(lambda left, right: exp.Add(this=left, expression=right), expression.flatten())
    )
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
- normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
def select_sql(self, expression: exp.Select) -> str:
    """Generate SELECT, reconciling OFFSET with T-SQL's ORDER BY/FETCH rules."""
    if not expression.args.get("offset"):
        return super().select_sql(expression)

    if not expression.args.get("order"):
        # ORDER BY is required in order to use OFFSET in a query, so we use
        # a noop order by, since we don't really care about the order.
        # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
        expression.order_by(exp.select(exp.null()).subquery(), copy=False)

    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
    # we replace here because otherwise TOP would be generated in select_sql
    limit = expression.args.get("limit")
    if isinstance(limit, exp.Limit):
        limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

    return super().select_sql(expression)
def queryoption_sql(self, expression: exp.QueryOption) -> str:
    option = self.sql(expression, "this")
    value = self.sql(expression, "expression")

    if not value:
        return option

    # Some OPTION settings are written 'NAME = value', others 'NAME value'.
    eq = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
    return f"{option} {eq}{value}"
def lateral_op(self, expression: exp.Lateral) -> str:
    # APPLY is T-SQL's spelling of LATERAL: CROSS APPLY vs OUTER APPLY.
    cross_apply = expression.args.get("cross_apply")
    if cross_apply is True:
        return "CROSS APPLY"
    if cross_apply is False:
        return "OUTER APPLY"

    # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
    self.unsupported("LATERAL clause is not supported.")
    return "LATERAL"
def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
    """Transpile SPLIT_PART into T-SQL's PARSENAME, when representable."""
    value = expression.this
    delimiter = expression.args.get("delimiter")
    index = expression.args.get("part_index")
    segments = len(value.name.split("."))

    all_literals = all(isinstance(arg, exp.Literal) for arg in (value, delimiter, index))

    # PARSENAME only splits on '.', supports at most four parts, and
    # numbers them from the right instead of the left.
    if (
        not all_literals
        or (delimiter and delimiter.name != ".")
        or not index
        or segments > 4
    ):
        self.unsupported(
            "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
        )
        return ""

    return self.func("PARSENAME", value, exp.Literal.number(segments + 1 - index.to_py()))
def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
    """Render TIMEFROMPARTS, discarding the unsupported nanosecond argument."""
    if (nano := expression.args.get("nano")) is not None:
        nano.pop()
        self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

    # TIMEFROMPARTS requires explicit fractions/precision arguments.
    for arg in ("fractions", "precision"):
        if expression.args.get(arg) is None:
            expression.set(arg, exp.Literal.number(0))

    return rename_func("TIMEFROMPARTS")(self, expression)
def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
    """Render DATETIMEFROMPARTS, discarding unsupported zone/nano arguments."""
    if (zone := expression.args.get("zone")) is not None:
        zone.pop()
        self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

    if (nano := expression.args.get("nano")) is not None:
        nano.pop()
        self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

    # DATETIMEFROMPARTS requires an explicit milliseconds argument.
    if expression.args.get("milli") is None:
        expression.set("milli", exp.Literal.number(0))

    return rename_func("DATETIMEFROMPARTS")(self, expression)
def setitem_sql(self, expression: exp.SetItem) -> str:
    target = expression.this
    # T-SQL does not use '=' in SET command, except when the LHS is a variable.
    if isinstance(target, exp.EQ) and not isinstance(target.left, exp.Parameter):
        return f"{self.sql(target.left)} {self.sql(target.right)}"

    return super().setitem_sql(expression)
def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    name_sql = self.sql(expression, "this")
    props = expression.args.get("properties")
    prop_list = props.expressions if props else []

    # Temporary objects are marked with a leading '#' rather than a keyword.
    if name_sql[:1] != "#" and any(isinstance(prop, exp.TemporaryProperty) for prop in prop_list):
        name_sql = f"[#{name_sql[1:]}" if name_sql.startswith("[") else f"#{name_sql}"

    return name_sql
def create_sql(self, expression: exp.Create) -> str:
    """Generate CREATE, rewriting CTAS to SELECT INTO and emulating IF NOT EXISTS."""
    kind = expression.kind
    exists = expression.args.pop("exists", None)

    like_property = expression.find(exp.LikeProperty)
    ctas = like_property.this if like_property else expression.expression

    if kind == "VIEW":
        expression.this.set("catalog", None)
        with_ = expression.args.get("with")
        if ctas and with_:
            # We've already preprocessed the Create expression to bubble up any nested CTEs,
            # but CREATE VIEW actually requires the WITH clause to come after it so we need
            # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
            ctas.set("with", with_.pop())

    sql = super().create_sql(expression)

    table = expression.find(exp.Table)

    # Convert CTAS statement to SELECT .. INTO ..
    if kind == "TABLE" and ctas:
        if isinstance(ctas, exp.UNWRAPPED_QUERIES):
            ctas = ctas.subquery()

        select_into = exp.select("*").from_(exp.alias_(ctas, "temp", table=True))
        select_into.set("into", exp.Into(this=table))

        if like_property:
            # CREATE .. LIKE only copies the schema, so select zero rows.
            select_into.limit(0, copy=False)

        sql = self.sql(select_into)

    if exists:
        identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
        sql_with_ctes = self.prepend_ctes(expression, sql)
        sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
        if kind == "SCHEMA":
            return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})"""
        elif kind == "TABLE":
            assert table
            where = exp.and_(
                exp.column("table_name").eq(table.name),
                exp.column("table_schema").eq(table.db) if table.db else None,
                exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
            )
            return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})"""
        elif kind == "INDEX":
            index = self.sql(exp.Literal.string(expression.this.text("this")))
            return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
    elif expression.args.get("replace"):
        sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

    return self.prepend_ctes(expression, sql)
def version_sql(self, expression: exp.Version) -> str:
    name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
    kind = expression.text("kind")
    expr = expression.expression

    if kind in ("FROM", "BETWEEN"):
        # FROM .. TO and BETWEEN .. AND take two boundary expressions.
        separator = "TO" if kind == "FROM" else "AND"
        bounds = expr.expressions
        expr_sql = f"{self.sql(seq_get(bounds, 0))} {separator} {self.sql(seq_get(bounds, 1))}"
    else:
        expr_sql = self.sql(expr)

    suffix = f" {expr_sql}" if expr_sql else ""
    return f"FOR {name} {kind}{suffix}"
def commit_sql(self, expression: exp.Commit) -> str:
    name = self.sql(expression, "this")
    name = f" {name}" if name else ""

    durability = expression.args.get("durability")
    if durability is None:
        suffix = ""
    else:
        setting = "ON" if durability else "OFF"
        suffix = f" WITH (DELAYED_DURABILITY = {setting})"

    return f"COMMIT TRANSACTION{name}{suffix}"
def identifier_sql(self, expression: exp.Identifier) -> str:
    rendered = super().identifier_sql(expression)

    # Temp objects are prefixed with '#'; global temp objects with '##'.
    if expression.args.get("global"):
        return f"##{rendered}"
    if expression.args.get("temporary"):
        return f"#{rendered}"

    return rendered
def declareitem_sql(self, expression: exp.DeclareItem) -> str:
    variable = self.sql(expression, "this")
    default = self.sql(expression, "default")
    default_sql = f" = {default}" if default else ""

    kind_sql = self.sql(expression, "kind")
    # Table variables are declared as 'AS TABLE (<schema>)'.
    if isinstance(expression.args.get("kind"), exp.Schema):
        kind_sql = f"TABLE {kind_sql}"

    return f"{variable} AS {kind_sql}{default_sql}"
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- SUPPORTS_MEDIAN
- SUPPORTS_UNIX_SECONDS
- ARRAY_SIZE_NAME
- ARRAY_SIZE_DIM_REQUIRED
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- transformcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- groupingsets_sql
- rollup_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- try_sql
- log_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodatetime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- arrayconcat_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonexists_sql
- arrayagg_sql
- apply_sql
- grant_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql
- string_sql
- median_sql
- overflowtruncatebehavior_sql
- unixseconds_sql
- arraysize_sql
- attach_sql
- detach_sql
- attachoption_sql
- featuresattime_sql
- watermarkcolumnconstraint_sql
- encodeproperty_sql
- includeproperty_sql