sqlglot.dialects.tsql
1from __future__ import annotations 2 3import datetime 4import re 5import typing as t 6from functools import partial, reduce 7 8from sqlglot import exp, generator, parser, tokens, transforms 9from sqlglot.dialects.dialect import ( 10 Dialect, 11 NormalizationStrategy, 12 any_value_to_max_sql, 13 build_date_delta, 14 date_delta_sql, 15 datestrtodate_sql, 16 generatedasidentitycolumnconstraint_sql, 17 max_or_greatest, 18 min_or_least, 19 rename_func, 20 strposition_sql, 21 timestrtotime_sql, 22 trim_sql, 23) 24from sqlglot.helper import seq_get 25from sqlglot.parser import build_coalesce 26from sqlglot.time import format_time 27from sqlglot.tokens import TokenType 28 29if t.TYPE_CHECKING: 30 from sqlglot._typing import E 31 32FULL_FORMAT_TIME_MAPPING = { 33 "weekday": "%A", 34 "dw": "%A", 35 "w": "%A", 36 "month": "%B", 37 "mm": "%B", 38 "m": "%B", 39} 40 41DATE_DELTA_INTERVAL = { 42 "year": "year", 43 "yyyy": "year", 44 "yy": "year", 45 "quarter": "quarter", 46 "qq": "quarter", 47 "q": "quarter", 48 "month": "month", 49 "mm": "month", 50 "m": "month", 51 "week": "week", 52 "ww": "week", 53 "wk": "week", 54 "day": "day", 55 "dd": "day", 56 "d": "day", 57} 58 59 60DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})") 61 62# N = Numeric, C=Currency 63TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"} 64 65DEFAULT_START_DATE = datetime.date(1900, 1, 1) 66 67BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias} 68 69# Unsupported options: 70# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] ) 71# - TABLE HINT 72OPTIONS: parser.OPTIONS_TYPE = { 73 **dict.fromkeys( 74 ( 75 "DISABLE_OPTIMIZED_PLAN_FORCING", 76 "FAST", 77 "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX", 78 "LABEL", 79 "MAXDOP", 80 "MAXRECURSION", 81 "MAX_GRANT_PERCENT", 82 "MIN_GRANT_PERCENT", 83 "NO_PERFORMANCE_SPOOL", 84 "QUERYTRACEON", 85 "RECOMPILE", 86 ), 87 tuple(), 88 ), 89 "CONCAT": ("UNION",), 90 "DISABLE": ("EXTERNALPUSHDOWN", 
"SCALEOUTEXECUTION"), 91 "EXPAND": ("VIEWS",), 92 "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"), 93 "HASH": ("GROUP", "JOIN", "UNION"), 94 "KEEP": ("PLAN",), 95 "KEEPFIXED": ("PLAN",), 96 "LOOP": ("JOIN",), 97 "MERGE": ("JOIN", "UNION"), 98 "OPTIMIZE": (("FOR", "UNKNOWN"),), 99 "ORDER": ("GROUP",), 100 "PARAMETERIZATION": ("FORCED", "SIMPLE"), 101 "ROBUST": ("PLAN",), 102 "USE": ("PLAN",), 103} 104 105 106XML_OPTIONS: parser.OPTIONS_TYPE = { 107 **dict.fromkeys( 108 ( 109 "AUTO", 110 "EXPLICIT", 111 "TYPE", 112 ), 113 tuple(), 114 ), 115 "ELEMENTS": ( 116 "XSINIL", 117 "ABSENT", 118 ), 119 "BINARY": ("BASE64",), 120} 121 122 123OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL") 124 125 126def _build_formatted_time( 127 exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None 128) -> t.Callable[[t.List], E]: 129 def _builder(args: t.List) -> E: 130 fmt = seq_get(args, 0) 131 if isinstance(fmt, exp.Expression): 132 fmt = exp.Literal.string( 133 format_time( 134 fmt.name.lower(), 135 ( 136 {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING} 137 if full_format_mapping 138 else TSQL.TIME_MAPPING 139 ), 140 ) 141 ) 142 143 this = seq_get(args, 1) 144 if isinstance(this, exp.Expression): 145 this = exp.cast(this, exp.DataType.Type.DATETIME2) 146 147 return exp_class(this=this, format=fmt) 148 149 return _builder 150 151 152def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr: 153 this = seq_get(args, 0) 154 fmt = seq_get(args, 1) 155 culture = seq_get(args, 2) 156 157 number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name)) 158 159 if number_fmt: 160 return exp.NumberToStr(this=this, format=fmt, culture=culture) 161 162 if fmt: 163 fmt = exp.Literal.string( 164 format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING) 165 if len(fmt.name) == 1 166 else format_time(fmt.name, TSQL.TIME_MAPPING) 167 ) 168 169 return exp.TimeToStr(this=this, format=fmt, culture=culture) 170 171 
def _build_eomonth(args: t.List) -> exp.LastDay:
    """Convert EOMONTH(start_date[, months_to_add]) into a LAST_DAY expression."""
    date = exp.TsOrDsToDate(this=seq_get(args, 0))
    month_lag = seq_get(args, 1)

    if month_lag is None:
        shifted: exp.Expression = date
    else:
        # Shift the date by the requested number of months before taking last-day.
        unit = DATE_DELTA_INTERVAL.get("month")
        shifted = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit))

    return exp.LastDay(this=shifted)


def _build_hashbytes(args: t.List) -> exp.Expression:
    """Map HASHBYTES('<algorithm>', data) onto sqlglot's dedicated hash expressions."""
    algorithm, data = args
    algorithm = algorithm.name.upper() if algorithm.is_string else ""

    if algorithm == "MD5":
        args.pop(0)
        return exp.MD5(this=data)
    if algorithm in ("SHA", "SHA1"):
        args.pop(0)
        return exp.SHA(this=data)
    if algorithm == "SHA2_256":
        return exp.SHA2(this=data, length=exp.Literal.number(256))
    if algorithm == "SHA2_512":
        return exp.SHA2(this=data, length=exp.Literal.number(512))

    # Unrecognized algorithm: keep the original call untouched.
    return exp.func("HASHBYTES", *args)


# Date parts that FORMAT() cannot render and must go through DATEPART instead.
DATEPART_ONLY_FORMATS = {"DW", "WK", "HOUR", "QUARTER"}


def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str:
    """Render NumberToStr/TimeToStr back into T-SQL FORMAT (or DATEPART)."""
    fmt = expression.args["format"]

    if isinstance(expression, exp.NumberToStr):
        fmt_sql = self.sql(fmt)
    elif fmt.is_string:
        mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING)

        name = (mapped_fmt or "").upper()
        if name in DATEPART_ONLY_FORMATS:
            # These parts have no FORMAT() pattern; fall back to DATEPART.
            return self.func("DATEPART", name, expression.this)

        fmt_sql = self.sql(exp.Literal.string(mapped_fmt))
    else:
        fmt_sql = self.format_time(expression) or self.sql(fmt)

    return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture"))


def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str:
    """Render GROUP_CONCAT as STRING_AGG(...) [WITHIN GROUP (ORDER BY ...)]."""
    this = expression.this

    distinct = expression.find(exp.Distinct)
    if distinct:
        # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression
        self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.")
        this = distinct.pop().expressions[0]

    order = ""
    if isinstance(expression.this, exp.Order):
        if expression.this.this:
            this = expression.this.this.pop()
        # Order has a leading space
        order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})"

    separator = expression.args.get("separator") or exp.Literal.string(",")
    return f"STRING_AGG({self.format_args(this, separator)}){order}"


def _build_date_delta(
    exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None
) -> t.Callable[[t.List], E]:
    """Return a builder for DATEDIFF-style calls: (unit, start_date, end_date)."""

    def _builder(args: t.List) -> E:
        unit = seq_get(args, 0)
        if unit and unit_mapping:
            unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name))

        start_date = seq_get(args, 1)
        if start_date and start_date.is_number:
            # Numeric types are valid DATETIME values
            if start_date.is_int:
                adds = DEFAULT_START_DATE + datetime.timedelta(days=start_date.to_py())
                start_date = exp.Literal.string(adds.strftime("%F"))
            else:
                # We currently don't handle float values, i.e. they're not converted to
                # equivalent DATETIMEs. This is not a problem when generating T-SQL code,
                # it is when transpiling to other dialects.
                return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit)

        return exp_class(
            this=exp.TimeStrToTime(this=seq_get(args, 2)),
            expression=exp.TimeStrToTime(this=start_date),
            unit=unit,
        )

    return _builder


def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression


# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts:
    """DATETIMEFROMPARTS(year, month, day, hour, minute, seconds, milliseconds)."""
    return exp.TimestampFromParts(
        year=seq_get(args, 0),
        month=seq_get(args, 1),
        day=seq_get(args, 2),
        hour=seq_get(args, 3),
        min=seq_get(args, 4),
        sec=seq_get(args, 5),
        milli=seq_get(args, 6),
    )
# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_timefromparts(args: t.List) -> exp.TimeFromParts:
    """TIMEFROMPARTS(hour, minute, seconds, fractions, precision)."""
    return exp.TimeFromParts(
        hour=seq_get(args, 0),
        min=seq_get(args, 1),
        sec=seq_get(args, 2),
        fractions=seq_get(args, 3),
        precision=seq_get(args, 4),
    )


def _build_with_arg_as_text(
    klass: t.Type[exp.Expression],
) -> t.Callable[[t.List[exp.Expression]], exp.Expression]:
    """Return a builder that casts a non-string first argument to TEXT."""

    def _parse(args: t.List[exp.Expression]) -> exp.Expression:
        this = seq_get(args, 0)
        if this and not this.is_string:
            this = exp.cast(this, exp.DataType.Type.TEXT)

        kwargs = {"this": this}

        expression = seq_get(args, 1)
        if expression:
            kwargs["expression"] = expression

        return klass(**kwargs)

    return _parse


# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16
def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous:
    # PARSENAME(...) will be stored into exp.SplitPart if:
    # - All args are literals
    # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL)
    if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args):
        this = args[0]
        part_index = args[1]
        split_count = len(this.name.split("."))
        if split_count <= 4:
            # PARSENAME counts parts right-to-left; SPLIT_PART counts left-to-right.
            return exp.SplitPart(
                this=this,
                delimiter=exp.Literal.string("."),
                part_index=exp.Literal.number(split_count + 1 - part_index.to_py()),
            )

    return exp.Anonymous(this="PARSENAME", expressions=args)


def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract:
    if len(args) == 1:
        # The default value for path is '$'. As a result, if you don't provide a
        # value for path, JSON_QUERY returns the input expression.
        args.append(exp.Literal.string("$"))

    return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect)
372 args.append(exp.Literal.string("$")) 373 374 return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect) 375 376 377def _json_extract_sql( 378 self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar 379) -> str: 380 json_query = self.func("JSON_QUERY", expression.this, expression.expression) 381 json_value = self.func("JSON_VALUE", expression.this, expression.expression) 382 return self.func("ISNULL", json_query, json_value) 383 384 385def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime): 386 sql = timestrtotime_sql(self, expression) 387 if expression.args.get("zone"): 388 # If there is a timezone, produce an expression like: 389 # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC' 390 # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information 391 return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC"))) 392 return sql 393 394 395def _build_datetrunc(args: t.List) -> exp.TimestampTrunc: 396 unit = seq_get(args, 0) 397 this = seq_get(args, 1) 398 399 if this and this.is_string: 400 this = exp.cast(this, exp.DataType.Type.DATETIME2) 401 402 return exp.TimestampTrunc(this=this, unit=unit) 403 404 405class TSQL(Dialect): 406 SUPPORTS_SEMI_ANTI_JOIN = False 407 LOG_BASE_FIRST = False 408 TYPED_DIVISION = True 409 CONCAT_COALESCE = True 410 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 411 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 412 413 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 414 415 TIME_MAPPING = { 416 "year": "%Y", 417 "dayofyear": "%j", 418 "day": "%d", 419 "dy": "%d", 420 "y": "%Y", 421 "week": "%W", 422 "ww": "%W", 423 "wk": "%W", 424 "hour": "%h", 425 "hh": "%I", 426 "minute": "%M", 427 "mi": "%M", 428 "n": "%M", 429 "second": "%S", 430 "ss": "%S", 431 "s": "%-S", 432 "millisecond": "%f", 433 "ms": "%f", 434 "weekday": "%w", 435 "dw": "%w", 436 "month": "%m", 437 "mm": 
"%M", 438 "m": "%-M", 439 "Y": "%Y", 440 "YYYY": "%Y", 441 "YY": "%y", 442 "MMMM": "%B", 443 "MMM": "%b", 444 "MM": "%m", 445 "M": "%-m", 446 "dddd": "%A", 447 "dd": "%d", 448 "d": "%-d", 449 "HH": "%H", 450 "H": "%-H", 451 "h": "%-I", 452 "ffffff": "%f", 453 "yyyy": "%Y", 454 "yy": "%y", 455 } 456 457 CONVERT_FORMAT_MAPPING = { 458 "0": "%b %d %Y %-I:%M%p", 459 "1": "%m/%d/%y", 460 "2": "%y.%m.%d", 461 "3": "%d/%m/%y", 462 "4": "%d.%m.%y", 463 "5": "%d-%m-%y", 464 "6": "%d %b %y", 465 "7": "%b %d, %y", 466 "8": "%H:%M:%S", 467 "9": "%b %d %Y %-I:%M:%S:%f%p", 468 "10": "mm-dd-yy", 469 "11": "yy/mm/dd", 470 "12": "yymmdd", 471 "13": "%d %b %Y %H:%M:ss:%f", 472 "14": "%H:%M:%S:%f", 473 "20": "%Y-%m-%d %H:%M:%S", 474 "21": "%Y-%m-%d %H:%M:%S.%f", 475 "22": "%m/%d/%y %-I:%M:%S %p", 476 "23": "%Y-%m-%d", 477 "24": "%H:%M:%S", 478 "25": "%Y-%m-%d %H:%M:%S.%f", 479 "100": "%b %d %Y %-I:%M%p", 480 "101": "%m/%d/%Y", 481 "102": "%Y.%m.%d", 482 "103": "%d/%m/%Y", 483 "104": "%d.%m.%Y", 484 "105": "%d-%m-%Y", 485 "106": "%d %b %Y", 486 "107": "%b %d, %Y", 487 "108": "%H:%M:%S", 488 "109": "%b %d %Y %-I:%M:%S:%f%p", 489 "110": "%m-%d-%Y", 490 "111": "%Y/%m/%d", 491 "112": "%Y%m%d", 492 "113": "%d %b %Y %H:%M:%S:%f", 493 "114": "%H:%M:%S:%f", 494 "120": "%Y-%m-%d %H:%M:%S", 495 "121": "%Y-%m-%d %H:%M:%S.%f", 496 "126": "%Y-%m-%dT%H:%M:%S.%f", 497 } 498 499 FORMAT_TIME_MAPPING = { 500 "y": "%B %Y", 501 "d": "%m/%d/%Y", 502 "H": "%-H", 503 "h": "%-I", 504 "s": "%Y-%m-%d %H:%M:%S", 505 "D": "%A,%B,%Y", 506 "f": "%A,%B,%Y %-I:%M %p", 507 "F": "%A,%B,%Y %-I:%M:%S %p", 508 "g": "%m/%d/%Y %-I:%M %p", 509 "G": "%m/%d/%Y %-I:%M:%S %p", 510 "M": "%B %-d", 511 "m": "%B %-d", 512 "O": "%Y-%m-%dT%H:%M:%S", 513 "u": "%Y-%M-%D %H:%M:%S%z", 514 "U": "%A, %B %D, %Y %H:%M:%S%z", 515 "T": "%-I:%M:%S %p", 516 "t": "%-I:%M", 517 "Y": "%a %Y", 518 } 519 520 class Tokenizer(tokens.Tokenizer): 521 IDENTIFIERS = [("[", "]"), '"'] 522 QUOTES = ["'", '"'] 523 HEX_STRINGS = [("0x", ""), ("0X", "")] 524 
VAR_SINGLE_TOKENS = {"@", "$", "#"} 525 526 KEYWORDS = { 527 **tokens.Tokenizer.KEYWORDS, 528 "CLUSTERED INDEX": TokenType.INDEX, 529 "DATETIME2": TokenType.DATETIME2, 530 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 531 "DECLARE": TokenType.DECLARE, 532 "EXEC": TokenType.COMMAND, 533 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 534 "GO": TokenType.COMMAND, 535 "IMAGE": TokenType.IMAGE, 536 "MONEY": TokenType.MONEY, 537 "NONCLUSTERED INDEX": TokenType.INDEX, 538 "NTEXT": TokenType.TEXT, 539 "OPTION": TokenType.OPTION, 540 "OUTPUT": TokenType.RETURNING, 541 "PRINT": TokenType.COMMAND, 542 "PROC": TokenType.PROCEDURE, 543 "REAL": TokenType.FLOAT, 544 "ROWVERSION": TokenType.ROWVERSION, 545 "SMALLDATETIME": TokenType.SMALLDATETIME, 546 "SMALLMONEY": TokenType.SMALLMONEY, 547 "SQL_VARIANT": TokenType.VARIANT, 548 "SYSTEM_USER": TokenType.CURRENT_USER, 549 "TOP": TokenType.TOP, 550 "TIMESTAMP": TokenType.ROWVERSION, 551 "TINYINT": TokenType.UTINYINT, 552 "UNIQUEIDENTIFIER": TokenType.UUID, 553 "UPDATE STATISTICS": TokenType.COMMAND, 554 "XML": TokenType.XML, 555 } 556 KEYWORDS.pop("/*+") 557 558 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 559 560 class Parser(parser.Parser): 561 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 562 LOG_DEFAULTS_TO_LN = True 563 STRING_ALIASES = True 564 NO_PAREN_IF_COMMANDS = False 565 566 QUERY_MODIFIER_PARSERS = { 567 **parser.Parser.QUERY_MODIFIER_PARSERS, 568 TokenType.OPTION: lambda self: ("options", self._parse_options()), 569 TokenType.FOR: lambda self: ("for", self._parse_for()), 570 } 571 572 # T-SQL does not allow BEGIN to be used as an identifier 573 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 574 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 575 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 576 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 577 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - 
{TokenType.BEGIN} 578 579 FUNCTIONS = { 580 **parser.Parser.FUNCTIONS, 581 "CHARINDEX": lambda args: exp.StrPosition( 582 this=seq_get(args, 1), 583 substr=seq_get(args, 0), 584 position=seq_get(args, 2), 585 ), 586 "COUNT": lambda args: exp.Count( 587 this=seq_get(args, 0), expressions=args[1:], big_int=False 588 ), 589 "COUNT_BIG": lambda args: exp.Count( 590 this=seq_get(args, 0), expressions=args[1:], big_int=True 591 ), 592 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 593 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 594 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 595 "DATEPART": _build_formatted_time(exp.TimeToStr), 596 "DATETIMEFROMPARTS": _build_datetimefromparts, 597 "EOMONTH": _build_eomonth, 598 "FORMAT": _build_format, 599 "GETDATE": exp.CurrentTimestamp.from_arg_list, 600 "HASHBYTES": _build_hashbytes, 601 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 602 "JSON_QUERY": _build_json_query, 603 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 604 "LEN": _build_with_arg_as_text(exp.Length), 605 "LEFT": _build_with_arg_as_text(exp.Left), 606 "NEWID": exp.Uuid.from_arg_list, 607 "RIGHT": _build_with_arg_as_text(exp.Right), 608 "PARSENAME": _build_parsename, 609 "REPLICATE": exp.Repeat.from_arg_list, 610 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 611 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 612 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 613 "SUSER_NAME": exp.CurrentUser.from_arg_list, 614 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 615 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 616 "TIMEFROMPARTS": _build_timefromparts, 617 "DATETRUNC": _build_datetrunc, 618 } 619 620 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 621 622 PROCEDURE_OPTIONS = dict.fromkeys( 623 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 624 ) 625 
626 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READ_ONLY"} 627 628 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 629 TokenType.TABLE, 630 *parser.Parser.TYPE_TOKENS, 631 } 632 633 STATEMENT_PARSERS = { 634 **parser.Parser.STATEMENT_PARSERS, 635 TokenType.DECLARE: lambda self: self._parse_declare(), 636 } 637 638 RANGE_PARSERS = { 639 **parser.Parser.RANGE_PARSERS, 640 TokenType.DCOLON: lambda self, this: self.expression( 641 exp.ScopeResolution, 642 this=this, 643 expression=self._parse_function() or self._parse_var(any_token=True), 644 ), 645 } 646 647 NO_PAREN_FUNCTION_PARSERS = { 648 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 649 "NEXT": lambda self: self._parse_next_value_for(), 650 } 651 652 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 653 COLUMN_OPERATORS = { 654 **parser.Parser.COLUMN_OPERATORS, 655 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 656 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 657 else self.expression(exp.ScopeResolution, this=this, expression=to), 658 } 659 660 def _parse_alter_table_set(self) -> exp.AlterSet: 661 return self._parse_wrapped(super()._parse_alter_table_set) 662 663 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 664 if self._match(TokenType.MERGE): 665 comments = self._prev_comments 666 merge = self._parse_merge() 667 merge.add_comments(comments, prepend=True) 668 return merge 669 670 return super()._parse_wrapped_select(table=table) 671 672 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 673 # We want to use _parse_types() if the first token after :: is a known type, 674 # otherwise we could parse something like x::varchar(max) into a function 675 if self._match_set(self.TYPE_TOKENS, advance=False): 676 return self._parse_types() 677 678 return self._parse_function() or self._parse_types() 679 680 def _parse_options(self) -> 
t.Optional[t.List[exp.Expression]]: 681 if not self._match(TokenType.OPTION): 682 return None 683 684 def _parse_option() -> t.Optional[exp.Expression]: 685 option = self._parse_var_from_options(OPTIONS) 686 if not option: 687 return None 688 689 self._match(TokenType.EQ) 690 return self.expression( 691 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 692 ) 693 694 return self._parse_wrapped_csv(_parse_option) 695 696 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 697 this = self._parse_primary_or_var() 698 if self._match(TokenType.L_PAREN, advance=False): 699 expression = self._parse_wrapped(self._parse_string) 700 else: 701 expression = None 702 703 return exp.XMLKeyValueOption(this=this, expression=expression) 704 705 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 706 if not self._match_pair(TokenType.FOR, TokenType.XML): 707 return None 708 709 def _parse_for_xml() -> t.Optional[exp.Expression]: 710 return self.expression( 711 exp.QueryOption, 712 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 713 or self._parse_xml_key_value_option(), 714 ) 715 716 return self._parse_csv(_parse_for_xml) 717 718 def _parse_projections(self) -> t.List[exp.Expression]: 719 """ 720 T-SQL supports the syntax alias = expression in the SELECT's projection list, 721 so we transform all parsed Selects to convert their EQ projections into Aliases. 
722 723 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 724 """ 725 return [ 726 ( 727 exp.alias_(projection.expression, projection.this.this, copy=False) 728 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 729 else projection 730 ) 731 for projection in super()._parse_projections() 732 ] 733 734 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 735 """Applies to SQL Server and Azure SQL Database 736 COMMIT [ { TRAN | TRANSACTION } 737 [ transaction_name | @tran_name_variable ] ] 738 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 739 740 ROLLBACK { TRAN | TRANSACTION } 741 [ transaction_name | @tran_name_variable 742 | savepoint_name | @savepoint_variable ] 743 """ 744 rollback = self._prev.token_type == TokenType.ROLLBACK 745 746 self._match_texts(("TRAN", "TRANSACTION")) 747 this = self._parse_id_var() 748 749 if rollback: 750 return self.expression(exp.Rollback, this=this) 751 752 durability = None 753 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 754 self._match_text_seq("DELAYED_DURABILITY") 755 self._match(TokenType.EQ) 756 757 if self._match_text_seq("OFF"): 758 durability = False 759 else: 760 self._match(TokenType.ON) 761 durability = True 762 763 self._match_r_paren() 764 765 return self.expression(exp.Commit, this=this, durability=durability) 766 767 def _parse_transaction(self) -> exp.Transaction | exp.Command: 768 """Applies to SQL Server and Azure SQL Database 769 BEGIN { TRAN | TRANSACTION } 770 [ { transaction_name | @tran_name_variable } 771 [ WITH MARK [ 'description' ] ] 772 ] 773 """ 774 if self._match_texts(("TRAN", "TRANSACTION")): 775 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 776 if self._match_text_seq("WITH", "MARK"): 777 transaction.set("mark", self._parse_string()) 778 779 return transaction 780 781 return self._parse_as_command(self._prev) 782 783 def _parse_returns(self) -> 
exp.ReturnsProperty: 784 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 785 returns = super()._parse_returns() 786 returns.set("table", table) 787 return returns 788 789 def _parse_convert( 790 self, strict: bool, safe: t.Optional[bool] = None 791 ) -> t.Optional[exp.Expression]: 792 this = self._parse_types() 793 self._match(TokenType.COMMA) 794 args = [this, *self._parse_csv(self._parse_assignment)] 795 convert = exp.Convert.from_arg_list(args) 796 convert.set("safe", safe) 797 convert.set("strict", strict) 798 return convert 799 800 def _parse_column_def( 801 self, this: t.Optional[exp.Expression], computed_column: bool = True 802 ) -> t.Optional[exp.Expression]: 803 this = super()._parse_column_def(this=this, computed_column=computed_column) 804 if not this: 805 return None 806 if self._match(TokenType.EQ): 807 this.set("default", self._parse_disjunction()) 808 if self._match_texts(self.COLUMN_DEFINITION_MODES): 809 this.set("output", self._prev.text) 810 return this 811 812 def _parse_user_defined_function( 813 self, kind: t.Optional[TokenType] = None 814 ) -> t.Optional[exp.Expression]: 815 this = super()._parse_user_defined_function(kind=kind) 816 817 if ( 818 kind == TokenType.FUNCTION 819 or isinstance(this, exp.UserDefinedFunction) 820 or self._match(TokenType.ALIAS, advance=False) 821 ): 822 return this 823 824 if not self._match(TokenType.WITH, advance=False): 825 expressions = self._parse_csv(self._parse_function_parameter) 826 else: 827 expressions = None 828 829 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 830 831 def _parse_into(self) -> t.Optional[exp.Into]: 832 into = super()._parse_into() 833 834 table = isinstance(into, exp.Into) and into.find(exp.Table) 835 if isinstance(table, exp.Table): 836 table_identifier = table.this 837 if table_identifier.args.get("temporary"): 838 # Promote the temporary property from the Identifier to the Into expression 839 t.cast(exp.Into, 
into).set("temporary", True) 840 841 return into 842 843 def _parse_id_var( 844 self, 845 any_token: bool = True, 846 tokens: t.Optional[t.Collection[TokenType]] = None, 847 ) -> t.Optional[exp.Expression]: 848 is_temporary = self._match(TokenType.HASH) 849 is_global = is_temporary and self._match(TokenType.HASH) 850 851 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 852 if this: 853 if is_global: 854 this.set("global", True) 855 elif is_temporary: 856 this.set("temporary", True) 857 858 return this 859 860 def _parse_create(self) -> exp.Create | exp.Command: 861 create = super()._parse_create() 862 863 if isinstance(create, exp.Create): 864 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 865 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 866 if not create.args.get("properties"): 867 create.set("properties", exp.Properties(expressions=[])) 868 869 create.args["properties"].append("expressions", exp.TemporaryProperty()) 870 871 return create 872 873 def _parse_if(self) -> t.Optional[exp.Expression]: 874 index = self._index 875 876 if self._match_text_seq("OBJECT_ID"): 877 self._parse_wrapped_csv(self._parse_string) 878 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 879 return self._parse_drop(exists=True) 880 self._retreat(index) 881 882 return super()._parse_if() 883 884 def _parse_unique(self) -> exp.UniqueColumnConstraint: 885 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 886 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 887 else: 888 this = self._parse_schema(self._parse_id_var(any_token=False)) 889 890 return self.expression(exp.UniqueColumnConstraint, this=this) 891 892 def _parse_partition(self) -> t.Optional[exp.Partition]: 893 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 894 return None 895 896 def parse_range(): 897 low = self._parse_bitwise() 898 high = self._parse_bitwise() if self._match_text_seq("TO") 
else None 899 900 return ( 901 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 902 ) 903 904 partition = self.expression( 905 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 906 ) 907 908 self._match_r_paren() 909 910 return partition 911 912 def _parse_declare(self) -> exp.Declare | exp.Command: 913 index = self._index 914 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 915 916 if not expressions or self._curr: 917 self._retreat(index) 918 return self._parse_as_command(self._prev) 919 920 return self.expression(exp.Declare, expressions=expressions) 921 922 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 923 var = self._parse_id_var() 924 if not var: 925 return None 926 927 value = None 928 self._match(TokenType.ALIAS) 929 if self._match(TokenType.TABLE): 930 data_type = self._parse_schema() 931 else: 932 data_type = self._parse_types() 933 if self._match(TokenType.EQ): 934 value = self._parse_bitwise() 935 936 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 937 938 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 939 expression = super()._parse_alter_table_alter() 940 941 if expression is not None: 942 collation = expression.args.get("collate") 943 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 944 identifier = collation.this 945 collation.set("this", exp.Var(this=identifier.name)) 946 947 return expression 948 949 class Generator(generator.Generator): 950 LIMIT_IS_TOP = True 951 QUERY_HINTS = False 952 RETURNING_END = False 953 NVL2_SUPPORTED = False 954 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 955 LIMIT_FETCH = "FETCH" 956 COMPUTED_COLUMN_WITH_TYPE = False 957 CTE_RECURSIVE_KEYWORD_REQUIRED = False 958 ENSURE_BOOLS = True 959 NULL_ORDERING_SUPPORTED = None 960 SUPPORTS_SINGLE_ARG_CONCAT = False 961 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 962 SUPPORTS_SELECT_INTO = True 963 
JSON_PATH_BRACKETED_KEY_SUPPORTED = False 964 SUPPORTS_TO_NUMBER = False 965 SET_OP_MODIFIERS = False 966 COPY_PARAMS_EQ_REQUIRED = True 967 PARSE_JSON_NAME = None 968 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 969 ALTER_SET_WRAPPED = True 970 ALTER_SET_TYPE = "" 971 972 EXPRESSIONS_WITHOUT_NESTED_CTES = { 973 exp.Create, 974 exp.Delete, 975 exp.Insert, 976 exp.Intersect, 977 exp.Except, 978 exp.Merge, 979 exp.Select, 980 exp.Subquery, 981 exp.Union, 982 exp.Update, 983 } 984 985 SUPPORTED_JSON_PATH_PARTS = { 986 exp.JSONPathKey, 987 exp.JSONPathRoot, 988 exp.JSONPathSubscript, 989 } 990 991 TYPE_MAPPING = { 992 **generator.Generator.TYPE_MAPPING, 993 exp.DataType.Type.BOOLEAN: "BIT", 994 exp.DataType.Type.DATETIME2: "DATETIME2", 995 exp.DataType.Type.DECIMAL: "NUMERIC", 996 exp.DataType.Type.DOUBLE: "FLOAT", 997 exp.DataType.Type.INT: "INTEGER", 998 exp.DataType.Type.ROWVERSION: "ROWVERSION", 999 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 1000 exp.DataType.Type.TIMESTAMP: "DATETIME2", 1001 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 1002 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 1003 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 1004 exp.DataType.Type.UTINYINT: "TINYINT", 1005 exp.DataType.Type.VARIANT: "SQL_VARIANT", 1006 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 1007 } 1008 1009 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 1010 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 1011 1012 TRANSFORMS = { 1013 **generator.Generator.TRANSFORMS, 1014 exp.AnyValue: any_value_to_max_sql, 1015 exp.ArrayToString: rename_func("STRING_AGG"), 1016 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 1017 exp.Chr: rename_func("CHAR"), 1018 exp.DateAdd: date_delta_sql("DATEADD"), 1019 exp.DateDiff: date_delta_sql("DATEDIFF"), 1020 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 1021 exp.CurrentDate: rename_func("GETDATE"), 1022 exp.CurrentTimestamp: rename_func("GETDATE"), 1023 exp.DateStrToDate: datestrtodate_sql, 1024 exp.Extract: 
rename_func("DATEPART"), 1025 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 1026 exp.GroupConcat: _string_agg_sql, 1027 exp.If: rename_func("IIF"), 1028 exp.JSONExtract: _json_extract_sql, 1029 exp.JSONExtractScalar: _json_extract_sql, 1030 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1031 exp.Ln: rename_func("LOG"), 1032 exp.Max: max_or_greatest, 1033 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 1034 exp.Min: min_or_least, 1035 exp.NumberToStr: _format_sql, 1036 exp.Repeat: rename_func("REPLICATE"), 1037 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 1038 exp.Select: transforms.preprocess( 1039 [ 1040 transforms.eliminate_distinct_on, 1041 transforms.eliminate_semi_and_anti_joins, 1042 transforms.eliminate_qualify, 1043 transforms.unnest_generate_date_array_using_recursive_cte, 1044 ] 1045 ), 1046 exp.Stddev: rename_func("STDEV"), 1047 exp.StrPosition: lambda self, e: strposition_sql( 1048 self, e, func_name="CHARINDEX", supports_position=True 1049 ), 1050 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 1051 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 1052 exp.SHA2: lambda self, e: self.func( 1053 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 1054 ), 1055 exp.TemporaryProperty: lambda self, e: "", 1056 exp.TimeStrToTime: _timestrtotime_sql, 1057 exp.TimeToStr: _format_sql, 1058 exp.Trim: trim_sql, 1059 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 1060 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 1061 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 1062 exp.Uuid: lambda *_: "NEWID()", 1063 exp.DateFromParts: rename_func("DATEFROMPARTS"), 1064 } 1065 1066 TRANSFORMS.pop(exp.ReturnsProperty) 1067 1068 PROPERTIES_LOCATION = { 1069 **generator.Generator.PROPERTIES_LOCATION, 1070 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 1071 } 1072 1073 
def scope_resolution(self, rhs: str, scope_name: str) -> str:
    """Render a scope-resolution access as `scope::rhs`."""
    return f"{scope_name}::{rhs}"

def select_sql(self, expression: exp.Select) -> str:
    """Render a SELECT, adapting LIMIT/OFFSET/FETCH to T-SQL's rules."""
    limit = expression.args.get("limit")
    offset = expression.args.get("offset")

    if isinstance(limit, exp.Fetch) and not offset:
        # Dialects like Oracle can FETCH directly from a row set but
        # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
        offset = exp.Offset(expression=exp.Literal.number(0))
        expression.set("offset", offset)

    if offset:
        if not expression.args.get("order"):
            # ORDER BY is required in order to use OFFSET in a query, so we use
            # a noop order by, since we don't really care about the order.
            # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
            expression.order_by(exp.select(exp.null()).subquery(), copy=False)

        if isinstance(limit, exp.Limit):
            # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
            # we replace here because otherwise TOP would be generated in select_sql
            limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

    return super().select_sql(expression)

def convert_sql(self, expression: exp.Convert) -> str:
    """Render CONVERT, or TRY_CONVERT when the expression is flagged safe."""
    name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
    return self.func(
        name, expression.this, expression.expression, expression.args.get("style")
    )

def queryoption_sql(self, expression: exp.QueryOption) -> str:
    """Render an OPTION(...) entry, inserting '=' only where T-SQL requires it."""
    option = self.sql(expression, "this")
    value = self.sql(expression, "expression")
    if value:
        # OPTIONS_THAT_REQUIRE_EQUAL is a module-level set defined elsewhere
        # in this file.
        optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
        return f"{option} {optional_equal_sign}{value}"
    return option

def lateral_op(self, expression: exp.Lateral) -> str:
    """Map LATERAL onto CROSS/OUTER APPLY; plain LATERAL is unsupported."""
    cross_apply = expression.args.get("cross_apply")
    if cross_apply is True:
        return "CROSS APPLY"
    if cross_apply is False:
        return "OUTER APPLY"

    # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
    self.unsupported("LATERAL clause is not supported.")
    return "LATERAL"

def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
    """Transpile SPLIT_PART to PARSENAME.

    Only possible for literal inputs split on '.' with at most 4 parts;
    PARSENAME also counts parts from the right, hence the index flip below.
    """
    this = expression.this
    split_count = len(this.name.split("."))
    delimiter = expression.args.get("delimiter")
    part_index = expression.args.get("part_index")

    if (
        not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
        or (delimiter and delimiter.name != ".")
        or not part_index
        or split_count > 4
    ):
        self.unsupported(
            "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
        )
        return ""

    return self.func(
        "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
    )

def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
    """Render TIMEFROMPARTS, dropping nanoseconds and defaulting
    fractions/precision to 0 (TIMEFROMPARTS requires all five arguments)."""
    nano = expression.args.get("nano")
    if nano is not None:
        # Remove the node from the AST so it isn't rendered.
        nano.pop()
        self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

    if expression.args.get("fractions") is None:
        expression.set("fractions", exp.Literal.number(0))
    if expression.args.get("precision") is None:
        expression.set("precision", exp.Literal.number(0))

    return rename_func("TIMEFROMPARTS")(self, expression)

def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
    """Render DATETIMEFROMPARTS, dropping unsupported zone/nano arguments
    and defaulting milliseconds to 0."""
    zone = expression.args.get("zone")
    if zone is not None:
        zone.pop()
        self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

    nano = expression.args.get("nano")
    if nano is not None:
        nano.pop()
        self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

    if expression.args.get("milli") is None:
        expression.set("milli", exp.Literal.number(0))

    return rename_func("DATETIMEFROMPARTS")(self, expression)

def setitem_sql(self, expression: exp.SetItem) -> str:
    """Render a SET item, omitting '=' except for variable assignments."""
    this = expression.this
    if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
        # T-SQL does not use '=' in SET command, except when the LHS is a variable.
        return f"{self.sql(this.left)} {self.sql(this.right)}"

    return super().setitem_sql(expression)

def boolean_sql(self, expression: exp.Boolean) -> str:
    """Render booleans as bit literals (1/0) in BIT contexts (see BIT_TYPES)
    or inside VALUES, and as the predicates (1 = 1)/(1 = 0) elsewhere."""
    if type(expression.parent) in BIT_TYPES or isinstance(
        expression.find_ancestor(exp.Values, exp.Select), exp.Values
    ):
        return "1" if expression.this else "0"

    return "(1 = 1)" if expression.this else "(1 = 0)"

def is_sql(self, expression: exp.Is) -> str:
    """Render `x IS TRUE/FALSE` as an equality (the boolean operand itself
    is rendered as a bit by boolean_sql); keep IS for everything else."""
    if isinstance(expression.expression, exp.Boolean):
        return self.binary(expression, "=")
    return self.binary(expression, "IS")

def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    """Render the CREATE target name, prefixing '#' for temporary tables
    (inside the opening '[' when the name is quoted)."""
    sql = self.sql(expression, "this")
    properties = expression.args.get("properties")

    if sql[:1] != "#" and any(
        isinstance(prop, exp.TemporaryProperty)
        for prop in (properties.expressions if properties else [])
    ):
        sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

    return sql

def create_sql(self, expression: exp.Create) -> str:
    """Render CREATE with T-SQL-specific rewrites.

    Handles: moving CTEs into a CREATE VIEW's query, converting CTAS to
    SELECT ... INTO ..., emulating IF NOT EXISTS via information_schema /
    sys.indexes probes wrapped around EXEC, and CREATE OR REPLACE ->
    CREATE OR ALTER.
    """
    kind = expression.kind
    # Pop so the base generator doesn't render IF NOT EXISTS itself.
    exists = expression.args.pop("exists", None)

    like_property = expression.find(exp.LikeProperty)
    if like_property:
        ctas_expression = like_property.this
    else:
        ctas_expression = expression.expression

    if kind == "VIEW":
        expression.this.set("catalog", None)
        with_ = expression.args.get("with")
        if ctas_expression and with_:
            # We've already preprocessed the Create expression to bubble up any nested CTEs,
            # but CREATE VIEW actually requires the WITH clause to come after it so we need
            # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
            ctas_expression.set("with", with_.pop())

    sql = super().create_sql(expression)

    table = expression.find(exp.Table)

    # Convert CTAS statement to SELECT .. INTO ..
    if kind == "TABLE" and ctas_expression:
        if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
            ctas_expression = ctas_expression.subquery()

        properties = expression.args.get("properties") or exp.Properties()
        is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

        select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
        select_into.set("into", exp.Into(this=table, temporary=is_temp))

        if like_property:
            # CREATE ... LIKE copies only the structure, so select zero rows.
            select_into.limit(0, copy=False)

        sql = self.sql(select_into)

    if exists:
        identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
        sql_with_ctes = self.prepend_ctes(expression, sql)
        sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
        if kind == "SCHEMA":
            return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})"""
        elif kind == "TABLE":
            assert table
            where = exp.and_(
                exp.column("table_name").eq(table.name),
                exp.column("table_schema").eq(table.db) if table.db else None,
                exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
            )
            return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})"""
        elif kind == "INDEX":
            index = self.sql(exp.Literal.string(expression.this.text("this")))
            return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
    elif expression.args.get("replace"):
        sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

    return self.prepend_ctes(expression, sql)
# The methods below belong to TSQL.Generator (the class header is outside this chunk).

@generator.unsupported_args("unlogged", "expressions")
def into_sql(self, expression: exp.Into) -> str:
    """Render SELECT ... INTO, pushing the temporary flag down onto the
    target Identifier so identifier_sql emits the '#' temp-table prefix."""
    if expression.args.get("temporary"):
        # If the Into expression has a temporary property, push this down to the Identifier
        table = expression.find(exp.Table)
        if table and isinstance(table.this, exp.Identifier):
            table.this.set("temporary", True)

    return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

def count_sql(self, expression: exp.Count) -> str:
    """COUNT_BIG for 64-bit counts (round-tripped via the `big_int` arg),
    plain COUNT otherwise."""
    func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
    return rename_func(func_name)(self, expression)

def offset_sql(self, expression: exp.Offset) -> str:
    """T-SQL requires the trailing ROWS keyword: ``OFFSET n ROWS``."""
    return f"{super().offset_sql(expression)} ROWS"

def version_sql(self, expression: exp.Version) -> str:
    """Render temporal queries: FOR SYSTEM_TIME AS OF x / FROM x TO y /
    BETWEEN x AND y / CONTAINED IN (...)."""
    name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
    this = f"FOR {name}"
    expr = expression.expression
    kind = expression.text("kind")
    if kind in ("FROM", "BETWEEN"):
        # FROM pairs with TO, BETWEEN pairs with AND.
        args = expr.expressions
        sep = "TO" if kind == "FROM" else "AND"
        expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
    else:
        expr_sql = self.sql(expr)

    expr_sql = f" {expr_sql}" if expr_sql else ""
    return f"{this} {kind}{expr_sql}"

def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
    """RETURNS [<table variable> ]<type> for function definitions."""
    table = expression.args.get("table")
    table = f"{table} " if table else ""
    return f"RETURNS {table}{self.sql(expression, 'this')}"

def returning_sql(self, expression: exp.Returning) -> str:
    """RETURNING maps to T-SQL's OUTPUT clause, optionally with INTO."""
    into = self.sql(expression, "into")
    into = self.seg(f"INTO {into}") if into else ""
    return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

def transaction_sql(self, expression: exp.Transaction) -> str:
    """BEGIN TRANSACTION [name] [WITH MARK 'description']."""
    this = self.sql(expression, "this")
    this = f" {this}" if this else ""
    mark = self.sql(expression, "mark")
    mark = f" WITH MARK {mark}" if mark else ""
    return f"BEGIN TRANSACTION{this}{mark}"

def commit_sql(self, expression: exp.Commit) -> str:
    """COMMIT TRANSACTION [name] [WITH (DELAYED_DURABILITY = ON|OFF)].

    `durability` is tri-state: None omits the clause entirely, True/False
    select ON/OFF.
    """
    this = self.sql(expression, "this")
    this = f" {this}" if this else ""
    durability = expression.args.get("durability")
    durability = (
        f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
        if durability is not None
        else ""
    )
    return f"COMMIT TRANSACTION{this}{durability}"

def rollback_sql(self, expression: exp.Rollback) -> str:
    """ROLLBACK TRANSACTION [name]."""
    this = self.sql(expression, "this")
    this = f" {this}" if this else ""
    return f"ROLLBACK TRANSACTION{this}"

def identifier_sql(self, expression: exp.Identifier) -> str:
    """Prefix temp-table identifiers: '##' for global temps, '#' for local
    ones (flags set by the parser's _parse_id_var / into_sql above)."""
    identifier = super().identifier_sql(expression)

    if expression.args.get("global"):
        identifier = f"##{identifier}"
    elif expression.args.get("temporary"):
        identifier = f"#{identifier}"

    return identifier

def constraint_sql(self, expression: exp.Constraint) -> str:
    """CONSTRAINT <name> <defs>, space-separated rather than comma-separated."""
    this = self.sql(expression, "this")
    expressions = self.expressions(expression, flat=True, sep=" ")
    return f"CONSTRAINT {this} {expressions}"

def length_sql(self, expression: exp.Length) -> str:
    """LENGTH -> LEN, unwrapping the parser-added TEXT cast."""
    return self._uncast_text(expression, "LEN")

def right_sql(self, expression: exp.Right) -> str:
    """RIGHT, unwrapping the parser-added TEXT cast."""
    return self._uncast_text(expression, "RIGHT")

def left_sql(self, expression: exp.Left) -> str:
    """LEFT, unwrapping the parser-added TEXT cast."""
    return self._uncast_text(expression, "LEFT")

def _uncast_text(self, expression: exp.Expression, name: str) -> str:
    """Render ``name(arg[, expr])``, dropping a CAST(x AS TEXT) wrapper on
    the first argument.

    The T-SQL parser wraps these arguments in a TEXT cast (via
    _build_with_arg_as_text for LEN/LEFT/RIGHT); undoing it here lets the
    original SQL round-trip without a spurious CAST.
    """
    this = expression.this
    if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
        this_sql = self.sql(this, "this")
    else:
        this_sql = self.sql(this)
    expression_sql = self.sql(expression, "expression")
    return self.func(name, this_sql, expression_sql if expression_sql else None)

def partition_sql(self, expression: exp.Partition) -> str:
    """WITH (PARTITIONS(...)) hint, mirroring the parser's _parse_partition."""
    return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

def alter_sql(self, expression: exp.Alter) -> str:
    """Renames go through ``EXEC sp_rename``; all other ALTER actions use
    the base generator."""
    action = seq_get(expression.args.get("actions") or [], 0)
    if isinstance(action, exp.AlterRename):
        return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
    return super().alter_sql(expression)

def drop_sql(self, expression: exp.Drop) -> str:
    """DROP VIEW cannot be catalog-qualified in T-SQL, so strip the catalog."""
    if expression.args["kind"] == "VIEW":
        expression.this.set("catalog", None)
    return super().drop_sql(expression)

def options_modifier(self, expression: exp.Expression) -> str:
    """Render the trailing OPTION(...) query-hint clause, if any."""
    options = self.expressions(expression, key="options")
    return f" OPTION{self.wrap(options)}" if options else ""

def dpipe_sql(self, expression: exp.DPipe) -> str:
    """T-SQL has no || operator; fold the concat chain into left-nested +."""
    return self.sql(
        reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
    )

def isascii_sql(self, expression: exp.IsAscii) -> str:
    """Emulate IS_ASCII via PATINDEX.

    0x255b5e002d7f5d25 decodes to the pattern '%[^\\x00-\\x7f]%'; the hex
    form (with a binary collation) keeps the NUL/DEL bytes intact. The
    string is ASCII iff no such character is found (PATINDEX = 0).
    """
    return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
    """Column/parameter definition with optional ``= default`` and a
    trailing OUT/OUTPUT/READ_ONLY mode (procedure parameters)."""
    this = super().columndef_sql(expression, sep)
    default = self.sql(expression, "default")
    default = f" = {default}" if default else ""
    output = self.sql(expression, "output")
    output = f" {output}" if output else ""
    return f"{this}{default}{output}"

def coalesce_sql(self, expression: exp.Coalesce) -> str:
    """ISNULL when the node was parsed from ISNULL (is_null flag set by the
    parser), COALESCE otherwise."""
    func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
    return rename_func(func_name)(self, expression)
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.

    Only fires for a CTE/Subquery that has a table alias but no explicit
    column alias list; the underlying query's projections are aliased in
    place via qualify_outputs and the expression is returned unchanged
    otherwise.
    """
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        # Imported lazily to avoid a module-level dependency cycle with the
        # optimizer package.
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        # NOTE(review): this is a generator expression, so enumerate(query.selects)
        # is evaluated eagerly (over the pre-qualify projection list) while the
        # isinstance/alias filters run lazily, against those same pre-qualify
        # Column nodes, when the loop below iterates it after qualify_outputs.
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression
Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
406class TSQL(Dialect): 407 SUPPORTS_SEMI_ANTI_JOIN = False 408 LOG_BASE_FIRST = False 409 TYPED_DIVISION = True 410 CONCAT_COALESCE = True 411 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 412 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 413 414 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 415 416 TIME_MAPPING = { 417 "year": "%Y", 418 "dayofyear": "%j", 419 "day": "%d", 420 "dy": "%d", 421 "y": "%Y", 422 "week": "%W", 423 "ww": "%W", 424 "wk": "%W", 425 "hour": "%h", 426 "hh": "%I", 427 "minute": "%M", 428 "mi": "%M", 429 "n": "%M", 430 "second": "%S", 431 "ss": "%S", 432 "s": "%-S", 433 "millisecond": "%f", 434 "ms": "%f", 435 "weekday": "%w", 436 "dw": "%w", 437 "month": "%m", 438 "mm": "%M", 439 "m": "%-M", 440 "Y": "%Y", 441 "YYYY": "%Y", 442 "YY": "%y", 443 "MMMM": "%B", 444 "MMM": "%b", 445 "MM": "%m", 446 "M": "%-m", 447 "dddd": "%A", 448 "dd": "%d", 449 "d": "%-d", 450 "HH": "%H", 451 "H": "%-H", 452 "h": "%-I", 453 "ffffff": "%f", 454 "yyyy": "%Y", 455 "yy": "%y", 456 } 457 458 CONVERT_FORMAT_MAPPING = { 459 "0": "%b %d %Y %-I:%M%p", 460 "1": "%m/%d/%y", 461 "2": "%y.%m.%d", 462 "3": "%d/%m/%y", 463 "4": "%d.%m.%y", 464 "5": "%d-%m-%y", 465 "6": "%d %b %y", 466 "7": "%b %d, %y", 467 "8": "%H:%M:%S", 468 "9": "%b %d %Y %-I:%M:%S:%f%p", 469 "10": "mm-dd-yy", 470 "11": "yy/mm/dd", 471 "12": "yymmdd", 472 "13": "%d %b %Y %H:%M:ss:%f", 473 "14": "%H:%M:%S:%f", 474 "20": "%Y-%m-%d %H:%M:%S", 475 "21": "%Y-%m-%d %H:%M:%S.%f", 476 "22": "%m/%d/%y %-I:%M:%S %p", 477 "23": "%Y-%m-%d", 478 "24": "%H:%M:%S", 479 "25": "%Y-%m-%d %H:%M:%S.%f", 480 "100": "%b %d %Y %-I:%M%p", 481 "101": "%m/%d/%Y", 482 "102": "%Y.%m.%d", 483 "103": "%d/%m/%Y", 484 "104": "%d.%m.%Y", 485 "105": "%d-%m-%Y", 486 "106": "%d %b %Y", 487 "107": "%b %d, %Y", 488 "108": "%H:%M:%S", 489 "109": "%b %d %Y %-I:%M:%S:%f%p", 490 "110": "%m-%d-%Y", 491 "111": "%Y/%m/%d", 492 "112": "%Y%m%d", 493 "113": "%d %b %Y %H:%M:%S:%f", 494 "114": "%H:%M:%S:%f", 495 "120": "%Y-%m-%d %H:%M:%S", 
496 "121": "%Y-%m-%d %H:%M:%S.%f", 497 "126": "%Y-%m-%dT%H:%M:%S.%f", 498 } 499 500 FORMAT_TIME_MAPPING = { 501 "y": "%B %Y", 502 "d": "%m/%d/%Y", 503 "H": "%-H", 504 "h": "%-I", 505 "s": "%Y-%m-%d %H:%M:%S", 506 "D": "%A,%B,%Y", 507 "f": "%A,%B,%Y %-I:%M %p", 508 "F": "%A,%B,%Y %-I:%M:%S %p", 509 "g": "%m/%d/%Y %-I:%M %p", 510 "G": "%m/%d/%Y %-I:%M:%S %p", 511 "M": "%B %-d", 512 "m": "%B %-d", 513 "O": "%Y-%m-%dT%H:%M:%S", 514 "u": "%Y-%M-%D %H:%M:%S%z", 515 "U": "%A, %B %D, %Y %H:%M:%S%z", 516 "T": "%-I:%M:%S %p", 517 "t": "%-I:%M", 518 "Y": "%a %Y", 519 } 520 521 class Tokenizer(tokens.Tokenizer): 522 IDENTIFIERS = [("[", "]"), '"'] 523 QUOTES = ["'", '"'] 524 HEX_STRINGS = [("0x", ""), ("0X", "")] 525 VAR_SINGLE_TOKENS = {"@", "$", "#"} 526 527 KEYWORDS = { 528 **tokens.Tokenizer.KEYWORDS, 529 "CLUSTERED INDEX": TokenType.INDEX, 530 "DATETIME2": TokenType.DATETIME2, 531 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 532 "DECLARE": TokenType.DECLARE, 533 "EXEC": TokenType.COMMAND, 534 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 535 "GO": TokenType.COMMAND, 536 "IMAGE": TokenType.IMAGE, 537 "MONEY": TokenType.MONEY, 538 "NONCLUSTERED INDEX": TokenType.INDEX, 539 "NTEXT": TokenType.TEXT, 540 "OPTION": TokenType.OPTION, 541 "OUTPUT": TokenType.RETURNING, 542 "PRINT": TokenType.COMMAND, 543 "PROC": TokenType.PROCEDURE, 544 "REAL": TokenType.FLOAT, 545 "ROWVERSION": TokenType.ROWVERSION, 546 "SMALLDATETIME": TokenType.SMALLDATETIME, 547 "SMALLMONEY": TokenType.SMALLMONEY, 548 "SQL_VARIANT": TokenType.VARIANT, 549 "SYSTEM_USER": TokenType.CURRENT_USER, 550 "TOP": TokenType.TOP, 551 "TIMESTAMP": TokenType.ROWVERSION, 552 "TINYINT": TokenType.UTINYINT, 553 "UNIQUEIDENTIFIER": TokenType.UUID, 554 "UPDATE STATISTICS": TokenType.COMMAND, 555 "XML": TokenType.XML, 556 } 557 KEYWORDS.pop("/*+") 558 559 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 560 561 class Parser(parser.Parser): 562 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 563 LOG_DEFAULTS_TO_LN = True 564 
STRING_ALIASES = True 565 NO_PAREN_IF_COMMANDS = False 566 567 QUERY_MODIFIER_PARSERS = { 568 **parser.Parser.QUERY_MODIFIER_PARSERS, 569 TokenType.OPTION: lambda self: ("options", self._parse_options()), 570 TokenType.FOR: lambda self: ("for", self._parse_for()), 571 } 572 573 # T-SQL does not allow BEGIN to be used as an identifier 574 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 575 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 576 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 577 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 578 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 579 580 FUNCTIONS = { 581 **parser.Parser.FUNCTIONS, 582 "CHARINDEX": lambda args: exp.StrPosition( 583 this=seq_get(args, 1), 584 substr=seq_get(args, 0), 585 position=seq_get(args, 2), 586 ), 587 "COUNT": lambda args: exp.Count( 588 this=seq_get(args, 0), expressions=args[1:], big_int=False 589 ), 590 "COUNT_BIG": lambda args: exp.Count( 591 this=seq_get(args, 0), expressions=args[1:], big_int=True 592 ), 593 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 594 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 595 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 596 "DATEPART": _build_formatted_time(exp.TimeToStr), 597 "DATETIMEFROMPARTS": _build_datetimefromparts, 598 "EOMONTH": _build_eomonth, 599 "FORMAT": _build_format, 600 "GETDATE": exp.CurrentTimestamp.from_arg_list, 601 "HASHBYTES": _build_hashbytes, 602 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 603 "JSON_QUERY": _build_json_query, 604 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 605 "LEN": _build_with_arg_as_text(exp.Length), 606 "LEFT": _build_with_arg_as_text(exp.Left), 607 "NEWID": exp.Uuid.from_arg_list, 608 "RIGHT": _build_with_arg_as_text(exp.Right), 609 
"PARSENAME": _build_parsename, 610 "REPLICATE": exp.Repeat.from_arg_list, 611 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 612 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 613 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 614 "SUSER_NAME": exp.CurrentUser.from_arg_list, 615 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 616 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 617 "TIMEFROMPARTS": _build_timefromparts, 618 "DATETRUNC": _build_datetrunc, 619 } 620 621 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 622 623 PROCEDURE_OPTIONS = dict.fromkeys( 624 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 625 ) 626 627 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READ_ONLY"} 628 629 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 630 TokenType.TABLE, 631 *parser.Parser.TYPE_TOKENS, 632 } 633 634 STATEMENT_PARSERS = { 635 **parser.Parser.STATEMENT_PARSERS, 636 TokenType.DECLARE: lambda self: self._parse_declare(), 637 } 638 639 RANGE_PARSERS = { 640 **parser.Parser.RANGE_PARSERS, 641 TokenType.DCOLON: lambda self, this: self.expression( 642 exp.ScopeResolution, 643 this=this, 644 expression=self._parse_function() or self._parse_var(any_token=True), 645 ), 646 } 647 648 NO_PAREN_FUNCTION_PARSERS = { 649 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 650 "NEXT": lambda self: self._parse_next_value_for(), 651 } 652 653 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 654 COLUMN_OPERATORS = { 655 **parser.Parser.COLUMN_OPERATORS, 656 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 657 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 658 else self.expression(exp.ScopeResolution, this=this, expression=to), 659 } 660 661 def _parse_alter_table_set(self) -> exp.AlterSet: 662 return self._parse_wrapped(super()._parse_alter_table_set) 663 664 def _parse_wrapped_select(self, 
table: bool = False) -> t.Optional[exp.Expression]: 665 if self._match(TokenType.MERGE): 666 comments = self._prev_comments 667 merge = self._parse_merge() 668 merge.add_comments(comments, prepend=True) 669 return merge 670 671 return super()._parse_wrapped_select(table=table) 672 673 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 674 # We want to use _parse_types() if the first token after :: is a known type, 675 # otherwise we could parse something like x::varchar(max) into a function 676 if self._match_set(self.TYPE_TOKENS, advance=False): 677 return self._parse_types() 678 679 return self._parse_function() or self._parse_types() 680 681 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 682 if not self._match(TokenType.OPTION): 683 return None 684 685 def _parse_option() -> t.Optional[exp.Expression]: 686 option = self._parse_var_from_options(OPTIONS) 687 if not option: 688 return None 689 690 self._match(TokenType.EQ) 691 return self.expression( 692 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 693 ) 694 695 return self._parse_wrapped_csv(_parse_option) 696 697 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 698 this = self._parse_primary_or_var() 699 if self._match(TokenType.L_PAREN, advance=False): 700 expression = self._parse_wrapped(self._parse_string) 701 else: 702 expression = None 703 704 return exp.XMLKeyValueOption(this=this, expression=expression) 705 706 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 707 if not self._match_pair(TokenType.FOR, TokenType.XML): 708 return None 709 710 def _parse_for_xml() -> t.Optional[exp.Expression]: 711 return self.expression( 712 exp.QueryOption, 713 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 714 or self._parse_xml_key_value_option(), 715 ) 716 717 return self._parse_csv(_parse_for_xml) 718 719 def _parse_projections(self) -> t.List[exp.Expression]: 720 """ 721 T-SQL supports the syntax alias = expression in the 
SELECT's projection list, 722 so we transform all parsed Selects to convert their EQ projections into Aliases. 723 724 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 725 """ 726 return [ 727 ( 728 exp.alias_(projection.expression, projection.this.this, copy=False) 729 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 730 else projection 731 ) 732 for projection in super()._parse_projections() 733 ] 734 735 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 736 """Applies to SQL Server and Azure SQL Database 737 COMMIT [ { TRAN | TRANSACTION } 738 [ transaction_name | @tran_name_variable ] ] 739 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 740 741 ROLLBACK { TRAN | TRANSACTION } 742 [ transaction_name | @tran_name_variable 743 | savepoint_name | @savepoint_variable ] 744 """ 745 rollback = self._prev.token_type == TokenType.ROLLBACK 746 747 self._match_texts(("TRAN", "TRANSACTION")) 748 this = self._parse_id_var() 749 750 if rollback: 751 return self.expression(exp.Rollback, this=this) 752 753 durability = None 754 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 755 self._match_text_seq("DELAYED_DURABILITY") 756 self._match(TokenType.EQ) 757 758 if self._match_text_seq("OFF"): 759 durability = False 760 else: 761 self._match(TokenType.ON) 762 durability = True 763 764 self._match_r_paren() 765 766 return self.expression(exp.Commit, this=this, durability=durability) 767 768 def _parse_transaction(self) -> exp.Transaction | exp.Command: 769 """Applies to SQL Server and Azure SQL Database 770 BEGIN { TRAN | TRANSACTION } 771 [ { transaction_name | @tran_name_variable } 772 [ WITH MARK [ 'description' ] ] 773 ] 774 """ 775 if self._match_texts(("TRAN", "TRANSACTION")): 776 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 777 if self._match_text_seq("WITH", "MARK"): 778 transaction.set("mark", self._parse_string()) 779 780 return 
transaction 781 782 return self._parse_as_command(self._prev) 783 784 def _parse_returns(self) -> exp.ReturnsProperty: 785 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 786 returns = super()._parse_returns() 787 returns.set("table", table) 788 return returns 789 790 def _parse_convert( 791 self, strict: bool, safe: t.Optional[bool] = None 792 ) -> t.Optional[exp.Expression]: 793 this = self._parse_types() 794 self._match(TokenType.COMMA) 795 args = [this, *self._parse_csv(self._parse_assignment)] 796 convert = exp.Convert.from_arg_list(args) 797 convert.set("safe", safe) 798 convert.set("strict", strict) 799 return convert 800 801 def _parse_column_def( 802 self, this: t.Optional[exp.Expression], computed_column: bool = True 803 ) -> t.Optional[exp.Expression]: 804 this = super()._parse_column_def(this=this, computed_column=computed_column) 805 if not this: 806 return None 807 if self._match(TokenType.EQ): 808 this.set("default", self._parse_disjunction()) 809 if self._match_texts(self.COLUMN_DEFINITION_MODES): 810 this.set("output", self._prev.text) 811 return this 812 813 def _parse_user_defined_function( 814 self, kind: t.Optional[TokenType] = None 815 ) -> t.Optional[exp.Expression]: 816 this = super()._parse_user_defined_function(kind=kind) 817 818 if ( 819 kind == TokenType.FUNCTION 820 or isinstance(this, exp.UserDefinedFunction) 821 or self._match(TokenType.ALIAS, advance=False) 822 ): 823 return this 824 825 if not self._match(TokenType.WITH, advance=False): 826 expressions = self._parse_csv(self._parse_function_parameter) 827 else: 828 expressions = None 829 830 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 831 832 def _parse_into(self) -> t.Optional[exp.Into]: 833 into = super()._parse_into() 834 835 table = isinstance(into, exp.Into) and into.find(exp.Table) 836 if isinstance(table, exp.Table): 837 table_identifier = table.this 838 if table_identifier.args.get("temporary"): 839 # 
Promote the temporary property from the Identifier to the Into expression 840 t.cast(exp.Into, into).set("temporary", True) 841 842 return into 843 844 def _parse_id_var( 845 self, 846 any_token: bool = True, 847 tokens: t.Optional[t.Collection[TokenType]] = None, 848 ) -> t.Optional[exp.Expression]: 849 is_temporary = self._match(TokenType.HASH) 850 is_global = is_temporary and self._match(TokenType.HASH) 851 852 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 853 if this: 854 if is_global: 855 this.set("global", True) 856 elif is_temporary: 857 this.set("temporary", True) 858 859 return this 860 861 def _parse_create(self) -> exp.Create | exp.Command: 862 create = super()._parse_create() 863 864 if isinstance(create, exp.Create): 865 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 866 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 867 if not create.args.get("properties"): 868 create.set("properties", exp.Properties(expressions=[])) 869 870 create.args["properties"].append("expressions", exp.TemporaryProperty()) 871 872 return create 873 874 def _parse_if(self) -> t.Optional[exp.Expression]: 875 index = self._index 876 877 if self._match_text_seq("OBJECT_ID"): 878 self._parse_wrapped_csv(self._parse_string) 879 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 880 return self._parse_drop(exists=True) 881 self._retreat(index) 882 883 return super()._parse_if() 884 885 def _parse_unique(self) -> exp.UniqueColumnConstraint: 886 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 887 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 888 else: 889 this = self._parse_schema(self._parse_id_var(any_token=False)) 890 891 return self.expression(exp.UniqueColumnConstraint, this=this) 892 893 def _parse_partition(self) -> t.Optional[exp.Partition]: 894 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 895 return None 896 897 def parse_range(): 
898 low = self._parse_bitwise() 899 high = self._parse_bitwise() if self._match_text_seq("TO") else None 900 901 return ( 902 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 903 ) 904 905 partition = self.expression( 906 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 907 ) 908 909 self._match_r_paren() 910 911 return partition 912 913 def _parse_declare(self) -> exp.Declare | exp.Command: 914 index = self._index 915 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 916 917 if not expressions or self._curr: 918 self._retreat(index) 919 return self._parse_as_command(self._prev) 920 921 return self.expression(exp.Declare, expressions=expressions) 922 923 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 924 var = self._parse_id_var() 925 if not var: 926 return None 927 928 value = None 929 self._match(TokenType.ALIAS) 930 if self._match(TokenType.TABLE): 931 data_type = self._parse_schema() 932 else: 933 data_type = self._parse_types() 934 if self._match(TokenType.EQ): 935 value = self._parse_bitwise() 936 937 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 938 939 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 940 expression = super()._parse_alter_table_alter() 941 942 if expression is not None: 943 collation = expression.args.get("collate") 944 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 945 identifier = collation.this 946 collation.set("this", exp.Var(this=identifier.name)) 947 948 return expression 949 950 class Generator(generator.Generator): 951 LIMIT_IS_TOP = True 952 QUERY_HINTS = False 953 RETURNING_END = False 954 NVL2_SUPPORTED = False 955 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 956 LIMIT_FETCH = "FETCH" 957 COMPUTED_COLUMN_WITH_TYPE = False 958 CTE_RECURSIVE_KEYWORD_REQUIRED = False 959 ENSURE_BOOLS = True 960 NULL_ORDERING_SUPPORTED = None 961 SUPPORTS_SINGLE_ARG_CONCAT = 
False 962 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 963 SUPPORTS_SELECT_INTO = True 964 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 965 SUPPORTS_TO_NUMBER = False 966 SET_OP_MODIFIERS = False 967 COPY_PARAMS_EQ_REQUIRED = True 968 PARSE_JSON_NAME = None 969 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 970 ALTER_SET_WRAPPED = True 971 ALTER_SET_TYPE = "" 972 973 EXPRESSIONS_WITHOUT_NESTED_CTES = { 974 exp.Create, 975 exp.Delete, 976 exp.Insert, 977 exp.Intersect, 978 exp.Except, 979 exp.Merge, 980 exp.Select, 981 exp.Subquery, 982 exp.Union, 983 exp.Update, 984 } 985 986 SUPPORTED_JSON_PATH_PARTS = { 987 exp.JSONPathKey, 988 exp.JSONPathRoot, 989 exp.JSONPathSubscript, 990 } 991 992 TYPE_MAPPING = { 993 **generator.Generator.TYPE_MAPPING, 994 exp.DataType.Type.BOOLEAN: "BIT", 995 exp.DataType.Type.DATETIME2: "DATETIME2", 996 exp.DataType.Type.DECIMAL: "NUMERIC", 997 exp.DataType.Type.DOUBLE: "FLOAT", 998 exp.DataType.Type.INT: "INTEGER", 999 exp.DataType.Type.ROWVERSION: "ROWVERSION", 1000 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 1001 exp.DataType.Type.TIMESTAMP: "DATETIME2", 1002 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 1003 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 1004 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 1005 exp.DataType.Type.UTINYINT: "TINYINT", 1006 exp.DataType.Type.VARIANT: "SQL_VARIANT", 1007 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 1008 } 1009 1010 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 1011 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 1012 1013 TRANSFORMS = { 1014 **generator.Generator.TRANSFORMS, 1015 exp.AnyValue: any_value_to_max_sql, 1016 exp.ArrayToString: rename_func("STRING_AGG"), 1017 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 1018 exp.Chr: rename_func("CHAR"), 1019 exp.DateAdd: date_delta_sql("DATEADD"), 1020 exp.DateDiff: date_delta_sql("DATEDIFF"), 1021 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 1022 exp.CurrentDate: rename_func("GETDATE"), 1023 exp.CurrentTimestamp: 
rename_func("GETDATE"), 1024 exp.DateStrToDate: datestrtodate_sql, 1025 exp.Extract: rename_func("DATEPART"), 1026 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 1027 exp.GroupConcat: _string_agg_sql, 1028 exp.If: rename_func("IIF"), 1029 exp.JSONExtract: _json_extract_sql, 1030 exp.JSONExtractScalar: _json_extract_sql, 1031 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1032 exp.Ln: rename_func("LOG"), 1033 exp.Max: max_or_greatest, 1034 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 1035 exp.Min: min_or_least, 1036 exp.NumberToStr: _format_sql, 1037 exp.Repeat: rename_func("REPLICATE"), 1038 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 1039 exp.Select: transforms.preprocess( 1040 [ 1041 transforms.eliminate_distinct_on, 1042 transforms.eliminate_semi_and_anti_joins, 1043 transforms.eliminate_qualify, 1044 transforms.unnest_generate_date_array_using_recursive_cte, 1045 ] 1046 ), 1047 exp.Stddev: rename_func("STDEV"), 1048 exp.StrPosition: lambda self, e: strposition_sql( 1049 self, e, func_name="CHARINDEX", supports_position=True 1050 ), 1051 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 1052 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 1053 exp.SHA2: lambda self, e: self.func( 1054 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 1055 ), 1056 exp.TemporaryProperty: lambda self, e: "", 1057 exp.TimeStrToTime: _timestrtotime_sql, 1058 exp.TimeToStr: _format_sql, 1059 exp.Trim: trim_sql, 1060 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 1061 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 1062 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 1063 exp.Uuid: lambda *_: "NEWID()", 1064 exp.DateFromParts: rename_func("DATEFROMPARTS"), 1065 } 1066 1067 TRANSFORMS.pop(exp.ReturnsProperty) 1068 1069 PROPERTIES_LOCATION = { 1070 
            **generator.Generator.PROPERTIES_LOCATION,
            # T-SQL has no VOLATILE table property.
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def scope_resolution(self, rhs: str, scope_name: str) -> str:
            """Render scope resolution as T-SQL's `scope::member` syntax."""
            return f"{scope_name}::{rhs}"

        def select_sql(self, expression: exp.Select) -> str:
            """Generate a SELECT, normalizing LIMIT/OFFSET into T-SQL's ORDER BY + OFFSET ... FETCH."""
            limit = expression.args.get("limit")
            offset = expression.args.get("offset")

            if isinstance(limit, exp.Fetch) and not offset:
                # Dialects like Oracle can FETCH directly from a row set but
                # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
                offset = exp.Offset(expression=exp.Literal.number(0))
                expression.set("offset", offset)

            if offset:
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

                if isinstance(limit, exp.Limit):
                    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                    # we replace here because otherwise TOP would be generated in select_sql
                    limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            """Render CONVERT/TRY_CONVERT(type, expr[, style])."""
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            """Render an OPTION(...) entry, adding '=' only where T-SQL requires it."""
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            """Map LATERAL joins to CROSS APPLY / OUTER APPLY; bare LATERAL is unsupported."""
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"

        def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
            """Transpile SPLIT_PART into PARSENAME when the delimiter is '.' and args are literals.

            PARSENAME numbers parts from the right, hence the index arithmetic below;
            it also supports at most 4 parts.
            """
            this = expression.this
            split_count = len(this.name.split("."))
            delimiter = expression.args.get("delimiter")
            part_index = expression.args.get("part_index")

            if (
                not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
                or (delimiter and delimiter.name != ".")
                or not part_index
                or split_count > 4
            ):
                self.unsupported(
                    "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
                )
                return ""

            return self.func(
                "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
            )

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            """Render TIMEFROMPARTS; drops nanoseconds and defaults fractions/precision to 0."""
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            # T-SQL's TIMEFROMPARTS requires all five arguments to be present.
            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)

        def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
            """Render DATETIMEFROMPARTS; drops time zone and nanoseconds, defaults milli to 0."""
            zone = expression.args.get("zone")
            if zone is not None:
                zone.pop()
                self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

            if expression.args.get("milli") is None:
                expression.set("milli", exp.Literal.number(0))

            return rename_func("DATETIMEFROMPARTS")(self, expression)

        def setitem_sql(self, expression: exp.SetItem) -> str:
            this = expression.this
            if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
                # T-SQL does not use '=' in SET command, except when the LHS is a variable.
                return f"{self.sql(this.left)} {self.sql(this.right)}"

            return super().setitem_sql(expression)

        def boolean_sql(self, expression: exp.Boolean) -> str:
            """Render booleans as BIT literals (1/0) in value position, predicates otherwise."""
            if type(expression.parent) in BIT_TYPES or isinstance(
                expression.find_ancestor(exp.Values, exp.Select), exp.Values
            ):
                return "1" if expression.this else "0"

            # T-SQL has no boolean literals in predicate position.
            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            # IS TRUE/FALSE does not exist in T-SQL; compare against the BIT literal instead.
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            """Prefix the created object's name with '#' when it is a temporary table."""
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                # Keep the '#' inside the bracket quoting if the name is quoted.
                sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            """Generate CREATE, handling CTAS -> SELECT INTO, IF NOT EXISTS emulation and OR ALTER."""
            kind = expression.kind
            exists = expression.args.pop("exists", None)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            if kind == "VIEW":
                expression.this.set("catalog", None)
                with_ = expression.args.get("with")
                if ctas_expression and with_:
                    # We've already preprocessed the Create expression to bubble up any nested CTEs,
                    # but CREATE VIEW actually requires the WITH clause to come after it so we need
                    # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                    ctas_expression.set("with", with_.pop())

            sql = super().create_sql(expression)

            table = expression.find(exp.Table)

            # Convert CTAS statement to SELECT .. INTO ..
            if kind == "TABLE" and ctas_expression:
                if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                    ctas_expression = ctas_expression.subquery()

                properties = expression.args.get("properties") or exp.Properties()
                is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

                select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
                select_into.set("into", exp.Into(this=table, temporary=is_temp))

                if like_property:
                    # CREATE ... LIKE only copies the schema, so select no rows.
                    select_into.limit(0, copy=False)

                sql = self.sql(select_into)

            if exists:
                # Emulate IF NOT EXISTS by probing the catalog views and EXEC-ing the DDL string.
                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
                sql_with_ctes = self.prepend_ctes(expression, sql)
                sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
                if kind == "SCHEMA":
                    return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})"""
                elif kind == "TABLE":
                    assert table
                    where = exp.and_(
                        exp.column("table_name").eq(table.name),
                        exp.column("table_schema").eq(table.db) if table.db else None,
                        exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
                    )
                    return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})"""
                elif kind == "INDEX":
                    index = self.sql(exp.Literal.string(expression.this.text("this")))
                    return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
            elif expression.args.get("replace"):
                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

            return self.prepend_ctes(expression, sql)

        @generator.unsupported_args("unlogged", "expressions")
        def into_sql(self, expression: exp.Into) -> str:
            if expression.args.get("temporary"):
                # If the Into expression has a temporary property, push this down to the Identifier
                table = expression.find(exp.Table)
                if table and isinstance(table.this, exp.Identifier):
                    table.this.set("temporary", True)

            return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

        def count_sql(self, expression: exp.Count) -> str:
            # COUNT_BIG returns bigint; plain COUNT returns int.
            func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
            return rename_func(func_name)(self, expression)

        def offset_sql(self, expression: exp.Offset) -> str:
            # T-SQL requires the ROWS keyword after OFFSET <n>.
            return f"{super().offset_sql(expression)} ROWS"

        def version_sql(self, expression: exp.Version) -> str:
            """Render temporal-table clauses, e.g. FOR SYSTEM_TIME FROM x TO y."""
            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
            this = f"FOR {name}"
            expr = expression.expression
            kind = expression.text("kind")
            if kind in ("FROM", "BETWEEN"):
                args = expr.expressions
                sep = "TO" if kind == "FROM" else "AND"
                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
            else:
                expr_sql = self.sql(expr)

            expr_sql = f" {expr_sql}" if expr_sql else ""
            return f"{this} {kind}{expr_sql}"

        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
            table = expression.args.get("table")
            table = f"{table} " if table else ""
            return f"RETURNS {table}{self.sql(expression, 'this')}"

        def returning_sql(self, expression: exp.Returning) -> str:
            # T-SQL spells RETURNING as OUTPUT [... INTO target].
            into = self.sql(expression, "into")
            into = self.seg(f"INTO {into}") if into else ""
            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

        def transaction_sql(self, expression: exp.Transaction) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            mark = self.sql(expression, "mark")
            mark = f" WITH MARK {mark}" if mark else ""
            return f"BEGIN TRANSACTION{this}{mark}"

        def commit_sql(self, expression: exp.Commit) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            durability = expression.args.get("durability")
            durability = (
                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
                if durability is not None
                else ""
            )
            return f"COMMIT TRANSACTION{this}{durability}"

        def rollback_sql(self, expression: exp.Rollback) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            return f"ROLLBACK TRANSACTION{this}"

        def identifier_sql(self, expression: exp.Identifier) -> str:
            """Prefix identifiers with ## (global temp) or # (local temp) as flagged by the parser."""
            identifier = super().identifier_sql(expression)

            if expression.args.get("global"):
                identifier = f"##{identifier}"
            elif expression.args.get("temporary"):
                identifier = f"#{identifier}"

            return identifier

        def constraint_sql(self, expression: exp.Constraint) -> str:
            this = self.sql(expression, "this")
            expressions = self.expressions(expression, flat=True, sep=" ")
            return f"CONSTRAINT {this} {expressions}"

        def length_sql(self, expression: exp.Length) -> str:
            return self._uncast_text(expression, "LEN")

        def right_sql(self, expression: exp.Right) -> str:
            return self._uncast_text(expression, "RIGHT")

        def left_sql(self, expression: exp.Left) -> str:
            return self._uncast_text(expression, "LEFT")

        def _uncast_text(self, expression: exp.Expression, name: str) -> str:
            # LEN/LEFT/RIGHT operate on strings natively, so a CAST(x AS TEXT)
            # wrapper (added when parsing these functions) can be dropped.
            this = expression.this
            if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
                this_sql = self.sql(this, "this")
            else:
                this_sql = self.sql(this)
            expression_sql = self.sql(expression, "expression")
            return self.func(name, this_sql, expression_sql if expression_sql else None)

        def partition_sql(self, expression: exp.Partition) -> str:
            return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

        def alter_sql(self, expression: exp.Alter) -> str:
            # ALTER ... RENAME becomes an sp_rename stored-procedure call in T-SQL.
            action = seq_get(expression.args.get("actions") or [], 0)
            if isinstance(action, exp.AlterRename):
                return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
            return super().alter_sql(expression)

        def drop_sql(self, expression: exp.Drop) -> str:
            if expression.args["kind"] == "VIEW":
                # Views cannot be qualified with a catalog in DROP statements.
                expression.this.set("catalog", None)
            return super().drop_sql(expression)

        def options_modifier(self, expression: exp.Expression) -> str:
            options = self.expressions(expression, key="options")
            return f" OPTION{self.wrap(options)}" if options else ""

        def dpipe_sql(self, expression: exp.DPipe) -> str:
            # T-SQL concatenates strings with '+', so fold a || b || c into nested Adds.
            return self.sql(
                reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
            )

        def isascii_sql(self, expression: exp.IsAscii) -> str:
            # The hex literal decodes to the pattern '%[^ -\x7f]%' under a binary collation,
            # i.e. "contains a char outside the printable ASCII range".
            return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

        def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
            # Append procedure-parameter extras: '= default' and OUT/OUTPUT/READ_ONLY mode.
            this = super().columndef_sql(expression, sep)
            default = self.sql(expression, "default")
            default = f" = {default}" if default else ""
            output = self.sql(expression, "output")
            output = f" {output}" if output else ""
            return f"{this}{default}{output}"

        def coalesce_sql(self, expression: exp.Coalesce) -> str:
            # Preserve ISNULL when the expression was parsed from one (is_null flag).
            func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
            return rename_func(func_name)(self, expression)
Whether the base comes first in the LOG function.
Possible values: True, False, None (two-argument LOG is not supported).
Whether the behavior of `a / b` depends on the types of `a` and `b`:
False means `a / b` is always float division;
True means `a / b` is integer division when both `a` and `b` are integers.
A NULL argument in CONCAT yields NULL by default, but in some dialects it yields an empty string.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python strftime
formats.
    class Tokenizer(tokens.Tokenizer):
        """T-SQL tokenizer configuration."""

        # T-SQL quotes identifiers with [brackets] or double quotes.
        IDENTIFIERS = [("[", "]"), '"']
        QUOTES = ["'", '"']
        # Hex literals use the 0x prefix with no closing delimiter.
        HEX_STRINGS = [("0x", ""), ("0X", "")]
        # @variables, $identifiers and #temp-table names are single-token vars.
        VAR_SINGLE_TOKENS = {"@", "$", "#"}

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "CLUSTERED INDEX": TokenType.INDEX,
            "DATETIME2": TokenType.DATETIME2,
            "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
            "DECLARE": TokenType.DECLARE,
            "EXEC": TokenType.COMMAND,
            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
            "GO": TokenType.COMMAND,
            "IMAGE": TokenType.IMAGE,
            "MONEY": TokenType.MONEY,
            "NONCLUSTERED INDEX": TokenType.INDEX,
            "NTEXT": TokenType.TEXT,
            "OPTION": TokenType.OPTION,
            "OUTPUT": TokenType.RETURNING,
            "PRINT": TokenType.COMMAND,
            "PROC": TokenType.PROCEDURE,
            "REAL": TokenType.FLOAT,
            "ROWVERSION": TokenType.ROWVERSION,
            "SMALLDATETIME": TokenType.SMALLDATETIME,
            "SMALLMONEY": TokenType.SMALLMONEY,
            "SQL_VARIANT": TokenType.VARIANT,
            "SYSTEM_USER": TokenType.CURRENT_USER,
            "TOP": TokenType.TOP,
            # T-SQL's TIMESTAMP type is a row-version marker, not a datetime.
            "TIMESTAMP": TokenType.ROWVERSION,
            # T-SQL's TINYINT is unsigned (0-255).
            "TINYINT": TokenType.UTINYINT,
            "UNIQUEIDENTIFIER": TokenType.UUID,
            "UPDATE STATISTICS": TokenType.COMMAND,
            "XML": TokenType.XML,
        }
        # T-SQL has no /*+ ... */ optimizer-hint comment syntax.
        KEYWORDS.pop("/*+")

        COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- use_rs_tokenizer
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
    class Parser(parser.Parser):
        """T-SQL parser configuration and dialect-specific parsing hooks."""

        SET_REQUIRES_ASSIGNMENT_DELIMITER = False
        # LOG(x) means natural log in T-SQL.
        LOG_DEFAULTS_TO_LN = True
        STRING_ALIASES = True
        NO_PAREN_IF_COMMANDS = False

        QUERY_MODIFIER_PARSERS = {
            **parser.Parser.QUERY_MODIFIER_PARSERS,
            TokenType.OPTION: lambda self: ("options", self._parse_options()),
            TokenType.FOR: lambda self: ("for", self._parse_for()),
        }

        # T-SQL does not allow BEGIN to be used as an identifier
        ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN}
        ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN}
        TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
        COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
        UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN}

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            # CHARINDEX(substr, string[, start]) -> StrPosition(string, substr, start).
            "CHARINDEX": lambda args: exp.StrPosition(
                this=seq_get(args, 1),
                substr=seq_get(args, 0),
                position=seq_get(args, 2),
            ),
            "COUNT": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=False
            ),
            "COUNT_BIG": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=True
            ),
            "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
            "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
            "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
            "DATEPART": _build_formatted_time(exp.TimeToStr),
            "DATETIMEFROMPARTS": _build_datetimefromparts,
            "EOMONTH": _build_eomonth,
            "FORMAT": _build_format,
            "GETDATE": exp.CurrentTimestamp.from_arg_list,
            "HASHBYTES": _build_hashbytes,
            # ISNULL is a two-arg COALESCE; the flag round-trips it back to ISNULL.
            "ISNULL": lambda args: build_coalesce(args=args, is_null=True),
            "JSON_QUERY": _build_json_query,
            "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
            "LEN": _build_with_arg_as_text(exp.Length),
            "LEFT": _build_with_arg_as_text(exp.Left),
            "NEWID": exp.Uuid.from_arg_list,
            "RIGHT": _build_with_arg_as_text(exp.Right),
            "PARSENAME": _build_parsename,
            "REPLICATE": exp.Repeat.from_arg_list,
            "SCHEMA_NAME": exp.CurrentSchema.from_arg_list,
            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
            "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
            "SUSER_NAME": exp.CurrentUser.from_arg_list,
            "SUSER_SNAME": exp.CurrentUser.from_arg_list,
            "SYSTEM_USER": exp.CurrentUser.from_arg_list,
            "TIMEFROMPARTS": _build_timefromparts,
            "DATETRUNC": _build_datetrunc,
        }

        JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

        PROCEDURE_OPTIONS = dict.fromkeys(
            ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple()
        )

        # Parameter modes on procedure/function column definitions.
        COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READ_ONLY"}

        RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
            TokenType.TABLE,
            *parser.Parser.TYPE_TOKENS,
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,
            TokenType.DECLARE: lambda self: self._parse_declare(),
        }

        RANGE_PARSERS = {
            **parser.Parser.RANGE_PARSERS,
            TokenType.DCOLON: lambda self, this: self.expression(
                exp.ScopeResolution,
                this=this,
                expression=self._parse_function() or self._parse_var(any_token=True),
            ),
        }

        NO_PAREN_FUNCTION_PARSERS = {
            **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
            "NEXT": lambda self: self._parse_next_value_for(),
        }

        # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
        COLUMN_OPERATORS = {
            **parser.Parser.COLUMN_OPERATORS,
            TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
            if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
            else self.expression(exp.ScopeResolution, this=this, expression=to),
        }

        def _parse_alter_table_set(self) -> exp.AlterSet:
            # T-SQL wraps the SET options in parentheses.
            return self._parse_wrapped(super()._parse_alter_table_set)

        def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]:
            # A parenthesized MERGE is valid where a query is expected.
            if self._match(TokenType.MERGE):
                comments = self._prev_comments
                merge = self._parse_merge()
                merge.add_comments(comments, prepend=True)
                return merge

            return super()._parse_wrapped_select(table=table)

        def _parse_dcolon(self) -> t.Optional[exp.Expression]:
            # We want to use _parse_types() if the first token after :: is a known type,
            # otherwise we could parse something like x::varchar(max) into a function
            if self._match_set(self.TYPE_TOKENS, advance=False):
                return self._parse_types()

            return self._parse_function() or self._parse_types()

        def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
            """Parse an OPTION(<hint> [= value], ...) query-hint clause."""
            if not self._match(TokenType.OPTION):
                return None

            def _parse_option() -> t.Optional[exp.Expression]:
                option = self._parse_var_from_options(OPTIONS)
                if not option:
                    return None

                self._match(TokenType.EQ)
                return self.expression(
                    exp.QueryOption, this=option, expression=self._parse_primary_or_var()
                )

            return self._parse_wrapped_csv(_parse_option)

        def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption:
            # e.g. ROOT('name') / PATH('name') entries of a FOR XML clause.
            this = self._parse_primary_or_var()
            if self._match(TokenType.L_PAREN, advance=False):
                expression = self._parse_wrapped(self._parse_string)
            else:
                expression = None

            return exp.XMLKeyValueOption(this=this, expression=expression)

        def _parse_for(self) -> t.Optional[t.List[exp.Expression]]:
            """Parse a FOR XML clause; returns None for any other FOR."""
            if not self._match_pair(TokenType.FOR, TokenType.XML):
                return None

            def _parse_for_xml() -> t.Optional[exp.Expression]:
                return self.expression(
                    exp.QueryOption,
                    this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False)
                    or self._parse_xml_key_value_option(),
                )

            return self._parse_csv(_parse_for_xml)

        def _parse_projections(self) -> t.List[exp.Expression]:
            """
            T-SQL supports the syntax alias = expression in the SELECT's projection list,
            so we transform all parsed Selects to convert their EQ projections into Aliases.

            See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
            """
            return [
                (
                    exp.alias_(projection.expression, projection.this.this, copy=False)
                    if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                    else projection
                )
                for projection in super()._parse_projections()
            ]

        def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
            """Applies to SQL Server and Azure SQL Database
            COMMIT [ { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable ] ]
                [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

            ROLLBACK { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable
                | savepoint_name | @savepoint_variable ]
            """
            rollback = self._prev.token_type == TokenType.ROLLBACK

            self._match_texts(("TRAN", "TRANSACTION"))
            this = self._parse_id_var()

            if rollback:
                return self.expression(exp.Rollback, this=this)

            durability = None
            if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
                self._match_text_seq("DELAYED_DURABILITY")
                self._match(TokenType.EQ)

                if self._match_text_seq("OFF"):
                    durability = False
                else:
                    self._match(TokenType.ON)
                    durability = True

                self._match_r_paren()

            return self.expression(exp.Commit, this=this, durability=durability)

        def _parse_transaction(self) -> exp.Transaction | exp.Command:
            """Applies to SQL Server and Azure SQL Database
            BEGIN { TRAN | TRANSACTION }
            [ { transaction_name | @tran_name_variable }
            [ WITH MARK [ 'description' ] ]
            ]
            """
            if self._match_texts(("TRAN", "TRANSACTION")):
                transaction = self.expression(exp.Transaction, this=self._parse_id_var())
                if self._match_text_seq("WITH", "MARK"):
                    transaction.set("mark", self._parse_string())

                return transaction

            # Any other BEGIN ... is treated as an opaque command.
            return self._parse_as_command(self._prev)

        def _parse_returns(self) -> exp.ReturnsProperty:
            # RETURNS @var TABLE (...) — capture the optional table variable name.
            table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
            returns = super()._parse_returns()
            returns.set("table", table)
            return returns

        def _parse_convert(
            self, strict: bool, safe: t.Optional[bool] = None
        ) -> t.Optional[exp.Expression]:
            # CONVERT(type, expr[, style]) — the type comes first in T-SQL.
            this = self._parse_types()
            self._match(TokenType.COMMA)
            args = [this, *self._parse_csv(self._parse_assignment)]
            convert = exp.Convert.from_arg_list(args)
            convert.set("safe", safe)
            convert.set("strict", strict)
            return convert

        def _parse_column_def(
            self, this: t.Optional[exp.Expression], computed_column: bool = True
        ) -> t.Optional[exp.Expression]:
            this = super()._parse_column_def(this=this, computed_column=computed_column)
            if not this:
                return None
            # Procedure parameters may carry '= default' and an OUT/OUTPUT/READ_ONLY mode.
            if self._match(TokenType.EQ):
                this.set("default", self._parse_disjunction())
            if self._match_texts(self.COLUMN_DEFINITION_MODES):
                this.set("output", self._prev.text)
            return this

        def _parse_user_defined_function(
            self, kind: t.Optional[TokenType] = None
        ) -> t.Optional[exp.Expression]:
            this = super()._parse_user_defined_function(kind=kind)

            if (
                kind == TokenType.FUNCTION
                or isinstance(this, exp.UserDefinedFunction)
                or self._match(TokenType.ALIAS, advance=False)
            ):
                return this

            # T-SQL procedures accept an unparenthesized parameter list.
            if not self._match(TokenType.WITH, advance=False):
                expressions = self._parse_csv(self._parse_function_parameter)
            else:
                expressions = None

            return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

        def _parse_into(self) -> t.Optional[exp.Into]:
            into = super()._parse_into()

            table = isinstance(into, exp.Into) and into.find(exp.Table)
            if isinstance(table, exp.Table):
                table_identifier = table.this
                if table_identifier.args.get("temporary"):
                    # Promote the temporary property from the Identifier to the Into expression
                    t.cast(exp.Into, into).set("temporary", True)

            return into

        def _parse_id_var(
            self,
            any_token: bool = True,
            tokens: t.Optional[t.Collection[TokenType]] = None,
        ) -> t.Optional[exp.Expression]:
            # '#name' marks a local temp object, '##name' a global one.
            is_temporary = self._match(TokenType.HASH)
            is_global = is_temporary and self._match(TokenType.HASH)

            this = super()._parse_id_var(any_token=any_token, tokens=tokens)
            if this:
                if is_global:
                    this.set("global", True)
                elif is_temporary:
                    this.set("temporary", True)

            return this

        def _parse_create(self) -> exp.Create | exp.Command:
            create = super()._parse_create()

            if isinstance(create, exp.Create):
                # A '#'-prefixed table name implies a temporary table; surface that
                # as a TemporaryProperty on the Create node.
                table = create.this.this if isinstance(create.this, exp.Schema) else create.this
                if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
                    if not create.args.get("properties"):
                        create.set("properties", exp.Properties(expressions=[]))

                    create.args["properties"].append("expressions", exp.TemporaryProperty())

            return create

        def _parse_if(self) -> t.Optional[exp.Expression]:
            index = self._index

            # IF OBJECT_ID('name') IS NOT NULL DROP ... is the idiomatic DROP IF EXISTS.
            if self._match_text_seq("OBJECT_ID"):
                self._parse_wrapped_csv(self._parse_string)
                if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                    return self._parse_drop(exists=True)
                self._retreat(index)

            return super()._parse_if()

        def _parse_unique(self) -> exp.UniqueColumnConstraint:
            # UNIQUE [CLUSTERED | NONCLUSTERED] (...)
            if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
                this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
            else:
                this = self._parse_schema(self._parse_id_var(any_token=False))

            return self.expression(exp.UniqueColumnConstraint, this=this)

        def _parse_partition(self) -> t.Optional[exp.Partition]:
            """Parse WITH (PARTITIONS (<n> [TO <m>], ...))."""
            if not self._match_text_seq("WITH", "(", "PARTITIONS"):
                return None

            def parse_range():
                low = self._parse_bitwise()
                high = self._parse_bitwise() if self._match_text_seq("TO") else None

                return (
                    self.expression(exp.PartitionRange, this=low, expression=high) if high else low
                )

            partition = self.expression(
                exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
            )

            self._match_r_paren()

            return partition

        def _parse_declare(self) -> exp.Declare | exp.Command:
            index = self._index
            expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem))

            # Fall back to an opaque command if the DECLARE couldn't be fully consumed.
            if not expressions or self._curr:
                self._retreat(index)
                return self._parse_as_command(self._prev)

            return self.expression(exp.Declare, expressions=expressions)

        def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
            """Parse one '@var [AS] <type|TABLE(...)> [= value]' DECLARE entry."""
            var = self._parse_id_var()
            if not var:
                return None

            value = None
            self._match(TokenType.ALIAS)
            if self._match(TokenType.TABLE):
                data_type = self._parse_schema()
            else:
                data_type = self._parse_types()
                if self._match(TokenType.EQ):
                    value = self._parse_bitwise()

            return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)

        def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]:
            expression = super()._parse_alter_table_alter()

            if expression is not None:
                # Collation names parse as Columns; normalize them to plain Vars.
                collation = expression.args.get("collate")
                if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier):
                    identifier = collation.this
                    collation.set("this", exp.Var(this=identifier.name))

            return expression
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- COLON_PLACEHOLDER_TOKENS
- ARRAY_CONSTRUCTORS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PIPE_SYNTAX_TRANSFORM_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- FUNCTION_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- WINDOW_EXCLUDE_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- NULL_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ODBC_DATETIME_LITERALS
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- ANALYZE_STYLES
- ANALYZE_EXPRESSION_PARSERS
- PARTITION_KEYWORDS
- AMBIGUOUS_ALIAS_TOKENS
- OPERATION_MODIFIERS
- RECURSIVE_CTE_SEARCH_KIND
- MODIFIABLES
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- SET_OP_MODIFIERS
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- OPTIONAL_ALIAS_TOKEN_CTE
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- parse_set_operation
- errors
- sql
class Generator(generator.Generator):
    """Converts a sqlglot syntax tree into T-SQL (Microsoft SQL Server) text.

    Overrides the base generator's dialect flags, type/transform mappings and
    per-expression `*_sql` methods to produce T-SQL-specific syntax such as
    TOP / OFFSET-FETCH, SELECT .. INTO for CTAS, OUTPUT clauses, and
    `IF NOT EXISTS (...) EXEC(...)` wrappers for conditional DDL.
    """

    # T-SQL uses SELECT TOP n rather than LIMIT n.
    LIMIT_IS_TOP = True
    QUERY_HINTS = False
    RETURNING_END = False
    NVL2_SUPPORTED = False
    ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
    LIMIT_FETCH = "FETCH"
    COMPUTED_COLUMN_WITH_TYPE = False
    CTE_RECURSIVE_KEYWORD_REQUIRED = False
    # Booleans must be rendered as bit expressions (see boolean_sql below).
    ENSURE_BOOLS = True
    NULL_ORDERING_SUPPORTED = None
    SUPPORTS_SINGLE_ARG_CONCAT = False
    TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
    SUPPORTS_SELECT_INTO = True
    JSON_PATH_BRACKETED_KEY_SUPPORTED = False
    SUPPORTS_TO_NUMBER = False
    SET_OP_MODIFIERS = False
    COPY_PARAMS_EQ_REQUIRED = True
    PARSE_JSON_NAME = None
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
    ALTER_SET_WRAPPED = True
    ALTER_SET_TYPE = ""

    # Statements whose nested CTEs must be bubbled up to the top level,
    # since T-SQL only allows WITH at the start of the statement.
    EXPRESSIONS_WITHOUT_NESTED_CTES = {
        exp.Create,
        exp.Delete,
        exp.Insert,
        exp.Intersect,
        exp.Except,
        exp.Merge,
        exp.Select,
        exp.Subquery,
        exp.Union,
        exp.Update,
    }

    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BOOLEAN: "BIT",
        exp.DataType.Type.DATETIME2: "DATETIME2",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DOUBLE: "FLOAT",
        exp.DataType.Type.INT: "INTEGER",
        exp.DataType.Type.ROWVERSION: "ROWVERSION",
        exp.DataType.Type.TEXT: "VARCHAR(MAX)",
        exp.DataType.Type.TIMESTAMP: "DATETIME2",
        exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
        exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
        exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
        exp.DataType.Type.UTINYINT: "TINYINT",
        exp.DataType.Type.VARIANT: "SQL_VARIANT",
        exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
    }

    # NCHAR / NVARCHAR are native T-SQL types; fall back to default rendering.
    TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
    TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.AnyValue: any_value_to_max_sql,
        exp.ArrayToString: rename_func("STRING_AGG"),
        exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
        exp.Chr: rename_func("CHAR"),
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.DateDiff: date_delta_sql("DATEDIFF"),
        exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
        exp.CurrentDate: rename_func("GETDATE"),
        exp.CurrentTimestamp: rename_func("GETDATE"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.Extract: rename_func("DATEPART"),
        exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
        exp.GroupConcat: _string_agg_sql,
        exp.If: rename_func("IIF"),
        exp.JSONExtract: _json_extract_sql,
        exp.JSONExtractScalar: _json_extract_sql,
        exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
        exp.Ln: rename_func("LOG"),
        exp.Max: max_or_greatest,
        exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
        exp.Min: min_or_least,
        exp.NumberToStr: _format_sql,
        exp.Repeat: rename_func("REPLICATE"),
        exp.CurrentSchema: rename_func("SCHEMA_NAME"),
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_distinct_on,
                transforms.eliminate_semi_and_anti_joins,
                transforms.eliminate_qualify,
                transforms.unnest_generate_date_array_using_recursive_cte,
            ]
        ),
        exp.Stddev: rename_func("STDEV"),
        exp.StrPosition: lambda self, e: strposition_sql(
            self, e, func_name="CHARINDEX", supports_position=True
        ),
        exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
        exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
        exp.SHA2: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
        ),
        exp.TemporaryProperty: lambda self, e: "",
        exp.TimeStrToTime: _timestrtotime_sql,
        exp.TimeToStr: _format_sql,
        exp.Trim: trim_sql,
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
        exp.Uuid: lambda *_: "NEWID()",
        exp.DateFromParts: rename_func("DATEFROMPARTS"),
    }

    # RETURNS is handled by returnsproperty_sql below, not by the generic transform.
    TRANSFORMS.pop(exp.ReturnsProperty)

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    def scope_resolution(self, rhs: str, scope_name: str) -> str:
        """Render scope resolution using T-SQL's `scope::member` syntax."""
        return f"{scope_name}::{rhs}"

    def select_sql(self, expression: exp.Select) -> str:
        """Generate a SELECT, rewriting LIMIT/FETCH into valid T-SQL paging.

        T-SQL only allows FETCH together with ORDER BY + OFFSET, and TOP
        cannot be combined with OFFSET, so the AST is amended before
        delegating to the base generator.
        """
        limit = expression.args.get("limit")
        offset = expression.args.get("offset")

        if isinstance(limit, exp.Fetch) and not offset:
            # Dialects like Oracle can FETCH directly from a row set but
            # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
            offset = exp.Offset(expression=exp.Literal.number(0))
            expression.set("offset", offset)

        if offset:
            if not expression.args.get("order"):
                # ORDER BY is required in order to use OFFSET in a query, so we use
                # a noop order by, since we don't really care about the order.
                # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                # we replace here because otherwise TOP would be generated in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

        return super().select_sql(expression)

    def convert_sql(self, expression: exp.Convert) -> str:
        """Render CONVERT / TRY_CONVERT, including the optional style argument."""
        name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
        return self.func(
            name, expression.this, expression.expression, expression.args.get("style")
        )

    def queryoption_sql(self, expression: exp.QueryOption) -> str:
        """Render an OPTION(...) entry; some options take `= value` syntax."""
        option = self.sql(expression, "this")
        value = self.sql(expression, "expression")
        if value:
            optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
            return f"{option} {optional_equal_sign}{value}"
        return option

    def lateral_op(self, expression: exp.Lateral) -> str:
        """Map LATERAL joins onto CROSS APPLY / OUTER APPLY where possible."""
        cross_apply = expression.args.get("cross_apply")
        if cross_apply is True:
            return "CROSS APPLY"
        if cross_apply is False:
            return "OUTER APPLY"

        # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
        self.unsupported("LATERAL clause is not supported.")
        return "LATERAL"

    def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
        """Transpile SPLIT_PART into PARSENAME when the delimiter is '.'.

        PARSENAME counts parts from the right and supports at most four
        parts, so the part index is inverted and the rewrite is rejected
        for non-literal arguments, other delimiters, or > 4 parts.
        """
        this = expression.this
        split_count = len(this.name.split("."))
        delimiter = expression.args.get("delimiter")
        part_index = expression.args.get("part_index")

        if (
            not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
            or (delimiter and delimiter.name != ".")
            or not part_index
            or split_count > 4
        ):
            self.unsupported(
                "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
            )
            return ""

        return self.func(
            "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
        )

    def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
        """Render TIMEFROMPARTS, defaulting fractions/precision to 0."""
        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

        if expression.args.get("fractions") is None:
            expression.set("fractions", exp.Literal.number(0))
        if expression.args.get("precision") is None:
            expression.set("precision", exp.Literal.number(0))

        return rename_func("TIMEFROMPARTS")(self, expression)

    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
        """Render DATETIMEFROMPARTS, dropping unsupported zone/nano arguments."""
        zone = expression.args.get("zone")
        if zone is not None:
            zone.pop()
            self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

        if expression.args.get("milli") is None:
            expression.set("milli", exp.Literal.number(0))

        return rename_func("DATETIMEFROMPARTS")(self, expression)

    def setitem_sql(self, expression: exp.SetItem) -> str:
        this = expression.this
        if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
            # T-SQL does not use '=' in SET command, except when the LHS is a variable.
            return f"{self.sql(this.left)} {self.sql(this.right)}"

        return super().setitem_sql(expression)

    def boolean_sql(self, expression: exp.Boolean) -> str:
        """Render booleans as bit literals (1/0) or predicates ((1 = 1)/(1 = 0)).

        Bit literals are used in value position (comparisons, IN, SELECT
        projections, VALUES rows); predicates are used elsewhere, since
        T-SQL has no standalone boolean type.
        """
        if type(expression.parent) in BIT_TYPES or isinstance(
            expression.find_ancestor(exp.Values, exp.Select), exp.Values
        ):
            return "1" if expression.this else "0"

        return "(1 = 1)" if expression.this else "(1 = 0)"

    def is_sql(self, expression: exp.Is) -> str:
        # `x IS TRUE/FALSE` has no direct T-SQL form; compare against the bit value.
        if isinstance(expression.expression, exp.Boolean):
            return self.binary(expression, "=")
        return self.binary(expression, "IS")

    def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
        """Prefix temporary table names with '#' (inside brackets if quoted)."""
        sql = self.sql(expression, "this")
        properties = expression.args.get("properties")

        if sql[:1] != "#" and any(
            isinstance(prop, exp.TemporaryProperty)
            for prop in (properties.expressions if properties else [])
        ):
            sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

        return sql

    def create_sql(self, expression: exp.Create) -> str:
        """Generate CREATE statements with T-SQL-specific rewrites.

        Handles: moving CTEs after CREATE VIEW, converting CTAS (and
        CREATE TABLE ... LIKE) into SELECT .. INTO .., emulating
        IF NOT EXISTS via `IF NOT EXISTS (...) EXEC(...)`, and mapping
        CREATE OR REPLACE to CREATE OR ALTER.
        """
        kind = expression.kind
        exists = expression.args.pop("exists", None)

        like_property = expression.find(exp.LikeProperty)
        if like_property:
            ctas_expression = like_property.this
        else:
            ctas_expression = expression.expression

        if kind == "VIEW":
            expression.this.set("catalog", None)
            with_ = expression.args.get("with")
            if ctas_expression and with_:
                # We've already preprocessed the Create expression to bubble up any nested CTEs,
                # but CREATE VIEW actually requires the WITH clause to come after it so we need
                # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                ctas_expression.set("with", with_.pop())

        sql = super().create_sql(expression)

        table = expression.find(exp.Table)

        # Convert CTAS statement to SELECT .. INTO ..
        if kind == "TABLE" and ctas_expression:
            if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                ctas_expression = ctas_expression.subquery()

            properties = expression.args.get("properties") or exp.Properties()
            is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

            select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
            select_into.set("into", exp.Into(this=table, temporary=is_temp))

            if like_property:
                # CREATE TABLE ... LIKE copies only the schema: select zero rows.
                select_into.limit(0, copy=False)

            sql = self.sql(select_into)

        if exists:
            identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
            sql_with_ctes = self.prepend_ctes(expression, sql)
            sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
            if kind == "SCHEMA":
                return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})"""
            elif kind == "TABLE":
                assert table
                where = exp.and_(
                    exp.column("table_name").eq(table.name),
                    exp.column("table_schema").eq(table.db) if table.db else None,
                    exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
                )
                return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})"""
            elif kind == "INDEX":
                index = self.sql(exp.Literal.string(expression.this.text("this")))
                return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
        elif expression.args.get("replace"):
            sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

        return self.prepend_ctes(expression, sql)

    @generator.unsupported_args("unlogged", "expressions")
    def into_sql(self, expression: exp.Into) -> str:
        if expression.args.get("temporary"):
            # If the Into expression has a temporary property, push this down to the Identifier
            table = expression.find(exp.Table)
            if table and isinstance(table.this, exp.Identifier):
                table.this.set("temporary", True)

        return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

    def count_sql(self, expression: exp.Count) -> str:
        # COUNT_BIG returns bigint; plain COUNT returns int.
        func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
        return rename_func(func_name)(self, expression)

    def offset_sql(self, expression: exp.Offset) -> str:
        # T-SQL requires the ROWS keyword after OFFSET <n>.
        return f"{super().offset_sql(expression)} ROWS"

    def version_sql(self, expression: exp.Version) -> str:
        """Render temporal-table clauses, e.g. FOR SYSTEM_TIME FROM ... TO ..."""
        name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
        this = f"FOR {name}"
        expr = expression.expression
        kind = expression.text("kind")
        if kind in ("FROM", "BETWEEN"):
            args = expr.expressions
            sep = "TO" if kind == "FROM" else "AND"
            expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
        else:
            expr_sql = self.sql(expr)

        expr_sql = f" {expr_sql}" if expr_sql else ""
        return f"{this} {kind}{expr_sql}"

    def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
        table = expression.args.get("table")
        table = f"{table} " if table else ""
        return f"RETURNS {table}{self.sql(expression, 'this')}"

    def returning_sql(self, expression: exp.Returning) -> str:
        # RETURNING maps to T-SQL's OUTPUT clause, optionally with INTO.
        into = self.sql(expression, "into")
        into = self.seg(f"INTO {into}") if into else ""
        return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

    def transaction_sql(self, expression: exp.Transaction) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        mark = self.sql(expression, "mark")
        mark = f" WITH MARK {mark}" if mark else ""
        return f"BEGIN TRANSACTION{this}{mark}"

    def commit_sql(self, expression: exp.Commit) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        durability = expression.args.get("durability")
        durability = (
            f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
            if durability is not None
            else ""
        )
        return f"COMMIT TRANSACTION{this}{durability}"

    def rollback_sql(self, expression: exp.Rollback) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"ROLLBACK TRANSACTION{this}"

    def identifier_sql(self, expression: exp.Identifier) -> str:
        # '#' marks local temporary objects, '##' marks global temporary objects.
        identifier = super().identifier_sql(expression)

        if expression.args.get("global"):
            identifier = f"##{identifier}"
        elif expression.args.get("temporary"):
            identifier = f"#{identifier}"

        return identifier

    def constraint_sql(self, expression: exp.Constraint) -> str:
        this = self.sql(expression, "this")
        expressions = self.expressions(expression, flat=True, sep=" ")
        return f"CONSTRAINT {this} {expressions}"

    def length_sql(self, expression: exp.Length) -> str:
        return self._uncast_text(expression, "LEN")

    def right_sql(self, expression: exp.Right) -> str:
        return self._uncast_text(expression, "RIGHT")

    def left_sql(self, expression: exp.Left) -> str:
        return self._uncast_text(expression, "LEFT")

    def _uncast_text(self, expression: exp.Expression, name: str) -> str:
        """Render `name(...)`, dropping a redundant CAST(... AS TEXT) on the argument."""
        this = expression.this
        if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
            this_sql = self.sql(this, "this")
        else:
            this_sql = self.sql(this)
        expression_sql = self.sql(expression, "expression")
        return self.func(name, this_sql, expression_sql if expression_sql else None)

    def partition_sql(self, expression: exp.Partition) -> str:
        return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

    def alter_sql(self, expression: exp.Alter) -> str:
        # Table renames are done via the sp_rename stored procedure in T-SQL.
        action = seq_get(expression.args.get("actions") or [], 0)
        if isinstance(action, exp.AlterRename):
            return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
        return super().alter_sql(expression)

    def drop_sql(self, expression: exp.Drop) -> str:
        if expression.args["kind"] == "VIEW":
            expression.this.set("catalog", None)
        return super().drop_sql(expression)

    def options_modifier(self, expression: exp.Expression) -> str:
        options = self.expressions(expression, key="options")
        return f" OPTION{self.wrap(options)}" if options else ""

    def dpipe_sql(self, expression: exp.DPipe) -> str:
        # T-SQL concatenates strings with '+', not '||'.
        return self.sql(
            reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
        )

    def isascii_sql(self, expression: exp.IsAscii) -> str:
        # PATINDEX over the pattern %[^\x00-\x7f]% (hex-encoded, binary collation):
        # zero matches means every character is ASCII.
        return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

    def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
        this = super().columndef_sql(expression, sep)
        default = self.sql(expression, "default")
        default = f" = {default}" if default else ""
        output = self.sql(expression, "output")
        output = f" {output}" if output else ""
        return f"{this}{default}{output}"

    def coalesce_sql(self, expression: exp.Coalesce) -> str:
        # Two-argument ISNULL is preserved when the source used it.
        func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
        return rename_func(func_name)(self, expression)
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
- normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default: ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3.
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
1077 def select_sql(self, expression: exp.Select) -> str: 1078 limit = expression.args.get("limit") 1079 offset = expression.args.get("offset") 1080 1081 if isinstance(limit, exp.Fetch) and not offset: 1082 # Dialects like Oracle can FETCH directly from a row set but 1083 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1084 offset = exp.Offset(expression=exp.Literal.number(0)) 1085 expression.set("offset", offset) 1086 1087 if offset: 1088 if not expression.args.get("order"): 1089 # ORDER BY is required in order to use OFFSET in a query, so we use 1090 # a noop order by, since we don't really care about the order. 1091 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1092 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1093 1094 if isinstance(limit, exp.Limit): 1095 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1096 # we replace here because otherwise TOP would be generated in select_sql 1097 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1098 1099 return super().select_sql(expression)
1107 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1108 option = self.sql(expression, "this") 1109 value = self.sql(expression, "expression") 1110 if value: 1111 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1112 return f"{option} {optional_equal_sign}{value}" 1113 return option
1115 def lateral_op(self, expression: exp.Lateral) -> str: 1116 cross_apply = expression.args.get("cross_apply") 1117 if cross_apply is True: 1118 return "CROSS APPLY" 1119 if cross_apply is False: 1120 return "OUTER APPLY" 1121 1122 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1123 self.unsupported("LATERAL clause is not supported.") 1124 return "LATERAL"
1126 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1127 this = expression.this 1128 split_count = len(this.name.split(".")) 1129 delimiter = expression.args.get("delimiter") 1130 part_index = expression.args.get("part_index") 1131 1132 if ( 1133 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1134 or (delimiter and delimiter.name != ".") 1135 or not part_index 1136 or split_count > 4 1137 ): 1138 self.unsupported( 1139 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1140 ) 1141 return "" 1142 1143 return self.func( 1144 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1145 )
1147 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1148 nano = expression.args.get("nano") 1149 if nano is not None: 1150 nano.pop() 1151 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1152 1153 if expression.args.get("fractions") is None: 1154 expression.set("fractions", exp.Literal.number(0)) 1155 if expression.args.get("precision") is None: 1156 expression.set("precision", exp.Literal.number(0)) 1157 1158 return rename_func("TIMEFROMPARTS")(self, expression)
1160 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1161 zone = expression.args.get("zone") 1162 if zone is not None: 1163 zone.pop() 1164 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1165 1166 nano = expression.args.get("nano") 1167 if nano is not None: 1168 nano.pop() 1169 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1170 1171 if expression.args.get("milli") is None: 1172 expression.set("milli", exp.Literal.number(0)) 1173 1174 return rename_func("DATETIMEFROMPARTS")(self, expression)
1176 def setitem_sql(self, expression: exp.SetItem) -> str: 1177 this = expression.this 1178 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1179 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1180 return f"{self.sql(this.left)} {self.sql(this.right)}" 1181 1182 return super().setitem_sql(expression)
1197 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1198 sql = self.sql(expression, "this") 1199 properties = expression.args.get("properties") 1200 1201 if sql[:1] != "#" and any( 1202 isinstance(prop, exp.TemporaryProperty) 1203 for prop in (properties.expressions if properties else []) 1204 ): 1205 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1206 1207 return sql
1209 def create_sql(self, expression: exp.Create) -> str: 1210 kind = expression.kind 1211 exists = expression.args.pop("exists", None) 1212 1213 like_property = expression.find(exp.LikeProperty) 1214 if like_property: 1215 ctas_expression = like_property.this 1216 else: 1217 ctas_expression = expression.expression 1218 1219 if kind == "VIEW": 1220 expression.this.set("catalog", None) 1221 with_ = expression.args.get("with") 1222 if ctas_expression and with_: 1223 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1224 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1225 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1226 ctas_expression.set("with", with_.pop()) 1227 1228 sql = super().create_sql(expression) 1229 1230 table = expression.find(exp.Table) 1231 1232 # Convert CTAS statement to SELECT .. INTO .. 1233 if kind == "TABLE" and ctas_expression: 1234 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1235 ctas_expression = ctas_expression.subquery() 1236 1237 properties = expression.args.get("properties") or exp.Properties() 1238 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1239 1240 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1241 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1242 1243 if like_property: 1244 select_into.limit(0, copy=False) 1245 1246 sql = self.sql(select_into) 1247 1248 if exists: 1249 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1250 sql_with_ctes = self.prepend_ctes(expression, sql) 1251 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1252 if kind == "SCHEMA": 1253 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1254 elif kind == "TABLE": 1255 assert table 1256 where = exp.and_( 1257 exp.column("table_name").eq(table.name), 
1258 exp.column("table_schema").eq(table.db) if table.db else None, 1259 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1260 ) 1261 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1262 elif kind == "INDEX": 1263 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1264 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1265 elif expression.args.get("replace"): 1266 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1267 1268 return self.prepend_ctes(expression, sql)
1270 @generator.unsupported_args("unlogged", "expressions") 1271 def into_sql(self, expression: exp.Into) -> str: 1272 if expression.args.get("temporary"): 1273 # If the Into expression has a temporary property, push this down to the Identifier 1274 table = expression.find(exp.Table) 1275 if table and isinstance(table.this, exp.Identifier): 1276 table.this.set("temporary", True) 1277 1278 return f"{self.seg('INTO')} {self.sql(expression, 'this')}"
1287 def version_sql(self, expression: exp.Version) -> str: 1288 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1289 this = f"FOR {name}" 1290 expr = expression.expression 1291 kind = expression.text("kind") 1292 if kind in ("FROM", "BETWEEN"): 1293 args = expr.expressions 1294 sep = "TO" if kind == "FROM" else "AND" 1295 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1296 else: 1297 expr_sql = self.sql(expr) 1298 1299 expr_sql = f" {expr_sql}" if expr_sql else "" 1300 return f"{this} {kind}{expr_sql}"
1319 def commit_sql(self, expression: exp.Commit) -> str: 1320 this = self.sql(expression, "this") 1321 this = f" {this}" if this else "" 1322 durability = expression.args.get("durability") 1323 durability = ( 1324 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1325 if durability is not None 1326 else "" 1327 ) 1328 return f"COMMIT TRANSACTION{this}{durability}"
1335 def identifier_sql(self, expression: exp.Identifier) -> str: 1336 identifier = super().identifier_sql(expression) 1337 1338 if expression.args.get("global"): 1339 identifier = f"##{identifier}" 1340 elif expression.args.get("temporary"): 1341 identifier = f"#{identifier}" 1342 1343 return identifier
1394 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1395 this = super().columndef_sql(expression, sep) 1396 default = self.sql(expression, "default") 1397 default = f" = {default}" if default else "" 1398 output = self.sql(expression, "output") 1399 output = f" {output}" if output else "" 1400 return f"{this}{default}{output}"
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- SUPPORTS_WINDOW_EXCLUDE
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- SUPPORTS_MEDIAN
- SUPPORTS_UNIX_SECONDS
- ARRAY_SIZE_NAME
- ARRAY_SIZE_DIM_REQUIRED
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- sanitize_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- columnposition_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- transformcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- limitoptions_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablefromrows_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- from_sql
- groupingsets_sql
- rollup_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- for_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterindex_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- safedivide_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- jsoncast_sql
- try_sql
- log_sql
- use_sql
- binary
- ceil_floor
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- whens_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- uniquekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodatetime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- arrayconcat_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonextractquote_sql
- jsonexists_sql
- arrayagg_sql
- apply_sql
- grant_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql
- string_sql
- median_sql
- overflowtruncatebehavior_sql
- unixseconds_sql
- arraysize_sql
- attach_sql
- detach_sql
- attachoption_sql
- featuresattime_sql
- watermarkcolumnconstraint_sql
- encodeproperty_sql
- includeproperty_sql
- xmlelement_sql
- xmlkeyvalueoption_sql
- partitionbyrangeproperty_sql
- partitionbyrangepropertydynamic_sql
- unpivotcolumns_sql
- analyzesample_sql
- analyzestatistics_sql
- analyzehistogram_sql
- analyzedelete_sql
- analyzelistchainedrows_sql
- analyzevalidate_sql
- analyze_sql
- xmltable_sql
- xmlnamespace_sql
- export_sql
- declare_sql
- declareitem_sql
- recursivewithsearch_sql
- parameterizedagg_sql
- anonymousaggfunc_sql
- combinedaggfunc_sql
- combinedparameterizedagg_sql
- show_sql
- get_put_sql
- translatecharacters_sql