sqlglot.dialects.tsql
from __future__ import annotations

import datetime
import re
import typing as t
from functools import partial

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    NormalizationStrategy,
    any_value_to_max_sql,
    date_delta_sql,
    datestrtodate_sql,
    generatedasidentitycolumnconstraint_sql,
    max_or_greatest,
    min_or_least,
    build_date_delta,
    rename_func,
    timestrtotime_sql,
    trim_sql,
)
from sqlglot.helper import seq_get
from sqlglot.time import format_time
from sqlglot.tokens import TokenType

if t.TYPE_CHECKING:
    from sqlglot._typing import E

# DATENAME-style part names mapped to strftime directives that spell the part
# out in full (e.g. "March", "Monday") rather than as a number.
FULL_FORMAT_TIME_MAPPING = {
    "weekday": "%A",
    "dw": "%A",
    "w": "%A",
    "month": "%B",
    "mm": "%B",
    "m": "%B",
}

# T-SQL DATEADD/DATEDIFF unit abbreviations normalized to canonical unit names.
DATE_DELTA_INTERVAL = {
    "year": "year",
    "yyyy": "year",
    "yy": "year",
    "quarter": "quarter",
    "qq": "quarter",
    "q": "quarter",
    "month": "month",
    "mm": "month",
    "m": "month",
    "week": "week",
    "ww": "week",
    "wk": "week",
    "day": "day",
    "dd": "day",
    "d": "day",
}


# Matches day/month/year/hour/second tokens, used to decide whether a FORMAT()
# format string is a date/time format (vs. a numeric format).
DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})")

# N = Numeric, C=Currency
TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"}

# T-SQL's epoch for integer DATETIME values: integer N means N days after 1900-01-01.
DEFAULT_START_DATE = datetime.date(1900, 1, 1)

# Parent node types under which a boolean literal is rendered as a BIT (1/0)
# instead of a comparison like (1 = 1).
BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias}

# Unsupported options:
# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] )
# - TABLE HINT
OPTIONS: parser.OPTIONS_TYPE = {
    **dict.fromkeys(
        (
            "DISABLE_OPTIMIZED_PLAN_FORCING",
            "FAST",
            "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX",
            "LABEL",
            "MAXDOP",
            "MAXRECURSION",
            "MAX_GRANT_PERCENT",
            "MIN_GRANT_PERCENT",
            "NO_PERFORMANCE_SPOOL",
            "QUERYTRACEON",
            "RECOMPILE",
        ),
        tuple(),
    ),
    "CONCAT": ("UNION",),
    "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"),
    "EXPAND": ("VIEWS",),
    "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"),
    "HASH": ("GROUP", "JOIN", "UNION"),
    "KEEP": ("PLAN",),
    "KEEPFIXED": ("PLAN",),
    "LOOP": ("JOIN",),
    "MERGE": ("JOIN", "UNION"),
    "OPTIMIZE": (("FOR", "UNKNOWN"),),
    "ORDER": ("GROUP",),
    "PARAMETERIZATION": ("FORCED", "SIMPLE"),
    "ROBUST": ("PLAN",),
    "USE": ("PLAN",),
}

# Query options whose value must be introduced with '=' when generated back.
OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL")


def _build_formatted_time(
    exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None
) -> t.Callable[[t.List], E]:
    """Build a parser for DATENAME/DATEPART-style calls: (part, date) -> exp_class.

    The date argument is cast to DATETIME and the T-SQL part name is translated
    to a strftime-style format string (using the full-name mapping when
    ``full_format_mapping`` is set, as DATENAME requires).
    """

    def _builder(args: t.List) -> E:
        assert len(args) == 2

        return exp_class(
            this=exp.cast(args[1], exp.DataType.Type.DATETIME),
            format=exp.Literal.string(
                format_time(
                    args[0].name.lower(),
                    (
                        {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING}
                        if full_format_mapping
                        else TSQL.TIME_MAPPING
                    ),
                )
            ),
        )

    return _builder


def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr:
    """Parse T-SQL FORMAT(value, fmt[, culture]) into NumberToStr or TimeToStr.

    A format is treated as numeric when it is a known transpile-safe numeric
    format or contains no date/time tokens; otherwise it is a time format and
    gets translated through the dialect's time mappings.
    """
    this = seq_get(args, 0)
    fmt = seq_get(args, 1)
    culture = seq_get(args, 2)

    number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name))

    if number_fmt:
        return exp.NumberToStr(this=this, format=fmt, culture=culture)

    if fmt:
        # Single characters are .NET "standard" format specifiers, longer
        # strings are "custom" ones — they use different mapping tables.
        fmt = exp.Literal.string(
            format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING)
            if len(fmt.name) == 1
            else format_time(fmt.name, TSQL.TIME_MAPPING)
        )

    return exp.TimeToStr(this=this, format=fmt, culture=culture)


def _build_eomonth(args: t.List) -> exp.LastDay:
    """Parse EOMONTH(date[, month_lag]) as LAST_DAY(date [+ month_lag months])."""
    date = exp.TsOrDsToDate(this=seq_get(args, 0))
    month_lag = seq_get(args, 1)

    if month_lag is None:
        this: exp.Expression = date
    else:
        unit = DATE_DELTA_INTERVAL.get("month")
        this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit))

    return exp.LastDay(this=this)


def _build_hashbytes(args: t.List) -> exp.Expression:
    """Parse HASHBYTES('<algo>', data) into the matching hash expression.

    Unknown algorithms fall through to an anonymous HASHBYTES call so nothing
    is lost on round-trip.
    """
    kind, data = args
    kind = kind.name.upper() if kind.is_string else ""

    if kind == "MD5":
        args.pop(0)
        return exp.MD5(this=data)
    if kind in ("SHA", "SHA1"):
        args.pop(0)
        return exp.SHA(this=data)
    if kind == "SHA2_256":
        return exp.SHA2(this=data, length=exp.Literal.number(256))
    if kind == "SHA2_512":
        return exp.SHA2(this=data, length=exp.Literal.number(512))

    return exp.func("HASHBYTES", *args)
# Date parts that FORMAT() cannot render and must go through DATEPART instead.
DATEPART_ONLY_FORMATS = {"DW", "HOUR", "QUARTER"}


def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str:
    """Generate T-SQL FORMAT(...) for NumberToStr/TimeToStr nodes.

    Time formats are mapped back through INVERSE_TIME_MAPPING; parts that
    FORMAT cannot express are emitted as DATEPART calls instead.
    """
    fmt = expression.args["format"]

    if not isinstance(expression, exp.NumberToStr):
        if fmt.is_string:
            mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING)

            name = (mapped_fmt or "").upper()
            if name in DATEPART_ONLY_FORMATS:
                return self.func("DATEPART", name, expression.this)

            fmt_sql = self.sql(exp.Literal.string(mapped_fmt))
        else:
            fmt_sql = self.format_time(expression) or self.sql(fmt)
    else:
        fmt_sql = self.sql(fmt)

    return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture"))


def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str:
    """Generate STRING_AGG(...) [WITHIN GROUP (ORDER BY ...)] for GROUP_CONCAT.

    DISTINCT is not supported by T-SQL's STRING_AGG, so it is flagged and
    stripped; an ORDER under the aggregate becomes a WITHIN GROUP clause.
    """
    this = expression.this
    distinct = expression.find(exp.Distinct)
    if distinct:
        # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression
        self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.")
        this = distinct.pop().expressions[0]

    order = ""
    if isinstance(expression.this, exp.Order):
        if expression.this.this:
            this = expression.this.this.pop()
        # Order has a leading space
        order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})"

    separator = expression.args.get("separator") or exp.Literal.string(",")
    return f"STRING_AGG({self.format_args(this, separator)}){order}"


def _build_date_delta(
    exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None
) -> t.Callable[[t.List], E]:
    """Build a parser for DATEDIFF-style calls: (unit, start, end) -> exp_class.

    Integer start dates are interpreted as offsets from 1900-01-01 (T-SQL's
    implicit DATETIME conversion) and rewritten as date strings.
    """

    def _builder(args: t.List) -> E:
        unit = seq_get(args, 0)
        if unit and unit_mapping:
            unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name))

        start_date = seq_get(args, 1)
        if start_date and start_date.is_number:
            # Numeric types are valid DATETIME values
            if start_date.is_int:
                adds = DEFAULT_START_DATE + datetime.timedelta(days=int(start_date.this))
                start_date = exp.Literal.string(adds.strftime("%F"))
            else:
                # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs.
                # This is not a problem when generating T-SQL code, it is when transpiling to other dialects.
                return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit)

        return exp_class(
            this=exp.TimeStrToTime(this=seq_get(args, 2)),
            expression=exp.TimeStrToTime(this=start_date),
            unit=unit,
        )

    return _builder


def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression
# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts:
    """Parse DATETIMEFROMPARTS(year, month, day, hour, minute, seconds, milliseconds)."""
    part_names = ("year", "month", "day", "hour", "min", "sec", "milli")
    return exp.TimestampFromParts(
        **{name: seq_get(args, index) for index, name in enumerate(part_names)}
    )


# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_timefromparts(args: t.List) -> exp.TimeFromParts:
    """Parse TIMEFROMPARTS(hour, minute, seconds, fractions, precision)."""
    part_names = ("hour", "min", "sec", "fractions", "precision")
    return exp.TimeFromParts(
        **{name: seq_get(args, index) for index, name in enumerate(part_names)}
    )


def _build_with_arg_as_text(
    klass: t.Type[exp.Expression],
) -> t.Callable[[t.List[exp.Expression]], exp.Expression]:
    """Build a parser that coerces the first argument to TEXT before wrapping it.

    Used for LEN/LEFT/RIGHT, which implicitly convert non-string input.
    """

    def _parse(args: t.List[exp.Expression]) -> exp.Expression:
        first = seq_get(args, 0)
        if first and not first.is_string:
            first = exp.cast(first, exp.DataType.Type.TEXT)

        second = seq_get(args, 1)
        if second:
            return klass(this=first, expression=second)

        return klass(this=first)

    return _parse


def _json_extract_sql(
    self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar
) -> str:
    """Emulate JSON extraction via ISNULL(JSON_QUERY(...), JSON_VALUE(...)).

    JSON_QUERY returns objects/arrays while JSON_VALUE returns scalars, so
    coalescing the two covers both cases.
    """
    path_args = (expression.this, expression.expression)
    return self.func(
        "ISNULL",
        self.func("JSON_QUERY", *path_args),
        self.func("JSON_VALUE", *path_args),
    )
class TSQL(Dialect):
    # T-SQL identifiers are case-insensitive.
    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE
    TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"
    SUPPORTS_SEMI_ANTI_JOIN = False
    # LOG(base, value) rather than LOG(value, base).
    LOG_BASE_FIRST = False
    # Integer division keeps integer semantics (no implicit float promotion).
    TYPED_DIVISION = True
    # CONCAT treats NULL arguments as empty strings.
    CONCAT_COALESCE = True

    # T-SQL date part / format tokens -> strftime-style directives.
    TIME_MAPPING = {
        "year": "%Y",
        "dayofyear": "%j",
        "day": "%d",
        "dy": "%d",
        "y": "%Y",
        "week": "%W",
        "ww": "%W",
        "wk": "%W",
        "hour": "%h",
        "hh": "%I",
        "minute": "%M",
        "mi": "%M",
        "n": "%M",
        "second": "%S",
        "ss": "%S",
        "s": "%-S",
        "millisecond": "%f",
        "ms": "%f",
        "weekday": "%W",
        "dw": "%W",
        "month": "%m",
        "mm": "%M",
        "m": "%-M",
        "Y": "%Y",
        "YYYY": "%Y",
        "YY": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dddd": "%A",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "h": "%-I",
        "S": "%f",
        "yyyy": "%Y",
        "yy": "%y",
    }

    # CONVERT() style codes -> strftime-style format strings.
    # See https://learn.microsoft.com/en-us/sql/t-sql/functions/cast-and-convert-transact-sql
    CONVERT_FORMAT_MAPPING = {
        "0": "%b %d %Y %-I:%M%p",
        "1": "%m/%d/%y",
        "2": "%y.%m.%d",
        "3": "%d/%m/%y",
        "4": "%d.%m.%y",
        "5": "%d-%m-%y",
        "6": "%d %b %y",
        "7": "%b %d, %y",
        "8": "%H:%M:%S",
        "9": "%b %d %Y %-I:%M:%S:%f%p",
        "10": "mm-dd-yy",
        "11": "yy/mm/dd",
        "12": "yymmdd",
        "13": "%d %b %Y %H:%M:ss:%f",
        "14": "%H:%M:%S:%f",
        "20": "%Y-%m-%d %H:%M:%S",
        "21": "%Y-%m-%d %H:%M:%S.%f",
        "22": "%m/%d/%y %-I:%M:%S %p",
        "23": "%Y-%m-%d",
        "24": "%H:%M:%S",
        "25": "%Y-%m-%d %H:%M:%S.%f",
        "100": "%b %d %Y %-I:%M%p",
        "101": "%m/%d/%Y",
        "102": "%Y.%m.%d",
        "103": "%d/%m/%Y",
        "104": "%d.%m.%Y",
        "105": "%d-%m-%Y",
        "106": "%d %b %Y",
        "107": "%b %d, %Y",
        "108": "%H:%M:%S",
        "109": "%b %d %Y %-I:%M:%S:%f%p",
        "110": "%m-%d-%Y",
        "111": "%Y/%m/%d",
        "112": "%Y%m%d",
        "113": "%d %b %Y %H:%M:%S:%f",
        "114": "%H:%M:%S:%f",
        "120": "%Y-%m-%d %H:%M:%S",
        "121": "%Y-%m-%d %H:%M:%S.%f",
    }

    # .NET "standard" single-character FORMAT() specifiers -> strftime-style formats.
    FORMAT_TIME_MAPPING = {
        "y": "%B %Y",
        "d": "%m/%d/%Y",
        "H": "%-H",
        "h": "%-I",
        "s": "%Y-%m-%d %H:%M:%S",
        "D": "%A,%B,%Y",
        "f": "%A,%B,%Y %-I:%M %p",
        "F": "%A,%B,%Y %-I:%M:%S %p",
        "g": "%m/%d/%Y %-I:%M %p",
        "G": "%m/%d/%Y %-I:%M:%S %p",
        "M": "%B %-d",
        "m": "%B %-d",
        "O": "%Y-%m-%dT%H:%M:%S",
        "u": "%Y-%M-%D %H:%M:%S%z",
        "U": "%A, %B %D, %Y %H:%M:%S%z",
        "T": "%-I:%M:%S %p",
        "t": "%-I:%M",
        "Y": "%a %Y",
    }
    class Tokenizer(tokens.Tokenizer):
        # Square-bracket and double-quote quoted identifiers; strings in single or double quotes.
        IDENTIFIERS = [("[", "]"), '"']
        QUOTES = ["'", '"']
        HEX_STRINGS = [("0x", ""), ("0X", "")]
        # Characters that may start a single-token variable (@var, $x, #temp).
        VAR_SINGLE_TOKENS = {"@", "$", "#"}

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "DATETIME2": TokenType.DATETIME,
            "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
            "DECLARE": TokenType.DECLARE,
            "EXEC": TokenType.COMMAND,
            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
            "IMAGE": TokenType.IMAGE,
            "MONEY": TokenType.MONEY,
            "NTEXT": TokenType.TEXT,
            "OPTION": TokenType.OPTION,
            "OUTPUT": TokenType.RETURNING,
            "PRINT": TokenType.COMMAND,
            "PROC": TokenType.PROCEDURE,
            "REAL": TokenType.FLOAT,
            "ROWVERSION": TokenType.ROWVERSION,
            "SMALLDATETIME": TokenType.DATETIME,
            "SMALLMONEY": TokenType.SMALLMONEY,
            "SQL_VARIANT": TokenType.VARIANT,
            "SYSTEM_USER": TokenType.CURRENT_USER,
            "TOP": TokenType.TOP,
            # T-SQL's TIMESTAMP type is a row-version counter, not a datetime.
            "TIMESTAMP": TokenType.ROWVERSION,
            "TINYINT": TokenType.UTINYINT,
            "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER,
            "UPDATE STATISTICS": TokenType.COMMAND,
            "XML": TokenType.XML,
        }

        COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}

    class Parser(parser.Parser):
        SET_REQUIRES_ASSIGNMENT_DELIMITER = False
        LOG_DEFAULTS_TO_LN = True
        ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
        STRING_ALIASES = True
        NO_PAREN_IF_COMMANDS = False

        QUERY_MODIFIER_PARSERS = {
            **parser.Parser.QUERY_MODIFIER_PARSERS,
            TokenType.OPTION: lambda self: ("options", self._parse_options()),
        }

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            # CHARINDEX(substr, string[, start]) — note the swapped argument order
            # relative to StrPosition(this, substr, position).
            "CHARINDEX": lambda args: exp.StrPosition(
                this=seq_get(args, 1),
                substr=seq_get(args, 0),
                position=seq_get(args, 2),
            ),
            "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
            "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
            "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
            "DATEPART": _build_formatted_time(exp.TimeToStr),
            "DATETIMEFROMPARTS": _build_datetimefromparts,
            "EOMONTH": _build_eomonth,
            "FORMAT": _build_format,
            "GETDATE": exp.CurrentTimestamp.from_arg_list,
            "HASHBYTES": _build_hashbytes,
            "ISNULL": exp.Coalesce.from_arg_list,
            "JSON_QUERY": parser.build_extract_json_with_path(exp.JSONExtract),
            "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
            "LEN": _build_with_arg_as_text(exp.Length),
            "LEFT": _build_with_arg_as_text(exp.Left),
            "RIGHT": _build_with_arg_as_text(exp.Right),
            "REPLICATE": exp.Repeat.from_arg_list,
            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
            "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
            "SUSER_NAME": exp.CurrentUser.from_arg_list,
            "SUSER_SNAME": exp.CurrentUser.from_arg_list,
            "SYSTEM_USER": exp.CurrentUser.from_arg_list,
            "TIMEFROMPARTS": _build_timefromparts,
        }

        JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

        # Tokens that may name the table in RETURNS TABLE (...) — exclude type
        # tokens so the return type itself isn't mistaken for a name.
        RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
            TokenType.TABLE,
            *parser.Parser.TYPE_TOKENS,
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,
            TokenType.DECLARE: lambda self: self._parse_declare(),
        }

        def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
            """Parse a trailing OPTION (...) query-hint clause, if present."""
            if not self._match(TokenType.OPTION):
                return None

            def _parse_option() -> t.Optional[exp.Expression]:
                option = self._parse_var_from_options(OPTIONS)
                if not option:
                    return None

                # The '=' between option and value is optional for most options.
                self._match(TokenType.EQ)
                return self.expression(
                    exp.QueryOption, this=option, expression=self._parse_primary_or_var()
                )

            return self._parse_wrapped_csv(_parse_option)
exp.QueryOption, this=option, expression=self._parse_primary_or_var() 549 ) 550 551 return self._parse_wrapped_csv(_parse_option) 552 553 def _parse_projections(self) -> t.List[exp.Expression]: 554 """ 555 T-SQL supports the syntax alias = expression in the SELECT's projection list, 556 so we transform all parsed Selects to convert their EQ projections into Aliases. 557 558 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 559 """ 560 return [ 561 ( 562 exp.alias_(projection.expression, projection.this.this, copy=False) 563 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 564 else projection 565 ) 566 for projection in super()._parse_projections() 567 ] 568 569 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 570 """Applies to SQL Server and Azure SQL Database 571 COMMIT [ { TRAN | TRANSACTION } 572 [ transaction_name | @tran_name_variable ] ] 573 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 574 575 ROLLBACK { TRAN | TRANSACTION } 576 [ transaction_name | @tran_name_variable 577 | savepoint_name | @savepoint_variable ] 578 """ 579 rollback = self._prev.token_type == TokenType.ROLLBACK 580 581 self._match_texts(("TRAN", "TRANSACTION")) 582 this = self._parse_id_var() 583 584 if rollback: 585 return self.expression(exp.Rollback, this=this) 586 587 durability = None 588 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 589 self._match_text_seq("DELAYED_DURABILITY") 590 self._match(TokenType.EQ) 591 592 if self._match_text_seq("OFF"): 593 durability = False 594 else: 595 self._match(TokenType.ON) 596 durability = True 597 598 self._match_r_paren() 599 600 return self.expression(exp.Commit, this=this, durability=durability) 601 602 def _parse_transaction(self) -> exp.Transaction | exp.Command: 603 """Applies to SQL Server and Azure SQL Database 604 BEGIN { TRAN | TRANSACTION } 605 [ { transaction_name | @tran_name_variable } 606 [ WITH MARK [ 
        def _parse_returns(self) -> exp.ReturnsProperty:
            """Parse RETURNS [table_name] <type>, capturing the optional table variable name."""
            table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
            returns = super()._parse_returns()
            returns.set("table", table)
            return returns

        def _parse_convert(
            self, strict: bool, safe: t.Optional[bool] = None
        ) -> t.Optional[exp.Expression]:
            """Parse CONVERT/TRY_CONVERT(type, expr[, style]) — type comes first in T-SQL."""
            this = self._parse_types()
            self._match(TokenType.COMMA)
            args = [this, *self._parse_csv(self._parse_conjunction)]
            convert = exp.Convert.from_arg_list(args)
            convert.set("safe", safe)
            convert.set("strict", strict)
            return convert

        def _parse_user_defined_function(
            self, kind: t.Optional[TokenType] = None
        ) -> t.Optional[exp.Expression]:
            """Parse a UDF/procedure head; procedures may list parameters without parens."""
            this = super()._parse_user_defined_function(kind=kind)

            if (
                kind == TokenType.FUNCTION
                or isinstance(this, exp.UserDefinedFunction)
                or self._match(TokenType.ALIAS, advance=False)
            ):
                return this

            # Parenthesis-less parameter list (CREATE PROC p @a INT, ...).
            expressions = self._parse_csv(self._parse_function_parameter)
            return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

        def _parse_id_var(
            self,
            any_token: bool = True,
            tokens: t.Optional[t.Collection[TokenType]] = None,
        ) -> t.Optional[exp.Expression]:
            """Parse an identifier, recognizing #temp and ##global temp-table prefixes."""
            is_temporary = self._match(TokenType.HASH)
            is_global = is_temporary and self._match(TokenType.HASH)

            this = super()._parse_id_var(any_token=any_token, tokens=tokens)
            if this:
                if is_global:
                    this.set("global", True)
                elif is_temporary:
                    this.set("temporary", True)

            return this

        def _parse_create(self) -> exp.Create | exp.Command:
            """Parse CREATE, promoting a #temp table name into a TemporaryProperty."""
            create = super()._parse_create()

            if isinstance(create, exp.Create):
                table = create.this.this if isinstance(create.this, exp.Schema) else create.this
                if isinstance(table, exp.Table) and table.this.args.get("temporary"):
                    if not create.args.get("properties"):
                        create.set("properties", exp.Properties(expressions=[]))

                    create.args["properties"].append("expressions", exp.TemporaryProperty())

            return create
        def _parse_if(self) -> t.Optional[exp.Expression]:
            """Parse IF, special-casing the IF OBJECT_ID(...) IS NOT NULL DROP idiom."""
            index = self._index

            if self._match_text_seq("OBJECT_ID"):
                self._parse_wrapped_csv(self._parse_string)
                if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                    # Equivalent to DROP ... IF EXISTS in other dialects.
                    return self._parse_drop(exists=True)
            self._retreat(index)

            return super()._parse_if()

        def _parse_unique(self) -> exp.UniqueColumnConstraint:
            """Parse UNIQUE [CLUSTERED | NONCLUSTERED] (...) constraints."""
            if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
                this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
            else:
                this = self._parse_schema(self._parse_id_var(any_token=False))

            return self.expression(exp.UniqueColumnConstraint, this=this)

        def _parse_partition(self) -> t.Optional[exp.Partition]:
            """Parse WITH (PARTITIONS (...)) including low TO high range entries."""
            if not self._match_text_seq("WITH", "(", "PARTITIONS"):
                return None

            def parse_range():
                low = self._parse_bitwise()
                high = self._parse_bitwise() if self._match_text_seq("TO") else None

                return (
                    self.expression(exp.PartitionRange, this=low, expression=high) if high else low
                )

            partition = self.expression(
                exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
            )

            self._match_r_paren()

            return partition

        def _parse_declare(self) -> exp.Declare | exp.Command:
            """Parse DECLARE; fall back to a raw command if the item list can't be parsed fully."""
            index = self._index
            expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem))

            # Leftover tokens mean we only partially understood the statement —
            # retreat and keep it verbatim rather than losing information.
            if not expressions or self._curr:
                self._retreat(index)
                return self._parse_as_command(self._prev)

            return self.expression(exp.Declare, expressions=expressions)
        def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
            """Parse one DECLARE item: @name [AS] <type | TABLE (...)> [= default]."""
            var = self._parse_id_var()
            if not var:
                return None

            value = None
            self._match(TokenType.ALIAS)
            if self._match(TokenType.TABLE):
                # Table variable: DECLARE @t TABLE (col type, ...).
                data_type = self._parse_schema()
            else:
                data_type = self._parse_types()
                if self._match(TokenType.EQ):
                    value = self._parse_bitwise()

            return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)

    class Generator(generator.Generator):
        LIMIT_IS_TOP = True
        QUERY_HINTS = False
        RETURNING_END = False
        NVL2_SUPPORTED = False
        ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
        LIMIT_FETCH = "FETCH"
        COMPUTED_COLUMN_WITH_TYPE = False
        CTE_RECURSIVE_KEYWORD_REQUIRED = False
        ENSURE_BOOLS = True
        NULL_ORDERING_SUPPORTED = None
        SUPPORTS_SINGLE_ARG_CONCAT = False
        TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
        SUPPORTS_SELECT_INTO = True
        JSON_PATH_BRACKETED_KEY_SUPPORTED = False
        SUPPORTS_TO_NUMBER = False
        OUTER_UNION_MODIFIERS = False
        COPY_PARAMS_EQ_REQUIRED = True

        # T-SQL disallows WITH inside these nodes, so nested CTEs get flattened.
        EXPRESSIONS_WITHOUT_NESTED_CTES = {
            exp.Delete,
            exp.Insert,
            exp.Merge,
            exp.Select,
            exp.Subquery,
            exp.Union,
            exp.Update,
        }

        SUPPORTED_JSON_PATH_PARTS = {
            exp.JSONPathKey,
            exp.JSONPathRoot,
            exp.JSONPathSubscript,
        }

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.BOOLEAN: "BIT",
            exp.DataType.Type.DECIMAL: "NUMERIC",
            exp.DataType.Type.DATETIME: "DATETIME2",
            exp.DataType.Type.DOUBLE: "FLOAT",
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.ROWVERSION: "ROWVERSION",
            exp.DataType.Type.TEXT: "VARCHAR(MAX)",
            exp.DataType.Type.TIMESTAMP: "DATETIME2",
            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
            exp.DataType.Type.UTINYINT: "TINYINT",
            exp.DataType.Type.VARIANT: "SQL_VARIANT",
        }

        # NCHAR/NVARCHAR are native T-SQL types — generate them as-is.
        TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
        TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.AnyValue: any_value_to_max_sql,
            exp.ArrayToString: rename_func("STRING_AGG"),
            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
            exp.DateAdd: date_delta_sql("DATEADD"),
            exp.DateDiff: date_delta_sql("DATEDIFF"),
            exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
            exp.CurrentDate: rename_func("GETDATE"),
            exp.CurrentTimestamp: rename_func("GETDATE"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.Extract: rename_func("DATEPART"),
            exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
            exp.GroupConcat: _string_agg_sql,
            exp.If: rename_func("IIF"),
            exp.JSONExtract: _json_extract_sql,
            exp.JSONExtractScalar: _json_extract_sql,
            exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
            exp.Max: max_or_greatest,
            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
            exp.Min: min_or_least,
            exp.NumberToStr: _format_sql,
            # T-SQL treats JSON as plain strings — drop the PARSE_JSON wrapper.
            exp.ParseJSON: lambda self, e: self.sql(e, "this"),
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_distinct_on,
                    transforms.eliminate_semi_and_anti_joins,
                    transforms.eliminate_qualify,
                ]
            ),
            exp.StrPosition: lambda self, e: self.func(
                "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position")
            ),
            exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
            exp.SHA2: lambda self, e: self.func(
                "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
            ),
            # Temp-ness is rendered via the '#' name prefix, not a keyword.
            exp.TemporaryProperty: lambda self, e: "",
            exp.TimeStrToTime: timestrtotime_sql,
            exp.TimeToStr: _format_sql,
            exp.Trim: trim_sql,
            exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
            exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        }
        # RETURNS has custom rendering below (returnsproperty_sql).
        TRANSFORMS.pop(exp.ReturnsProperty)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def select_sql(self, expression: exp.Select) -> str:
            """Generate SELECT, adapting LIMIT/OFFSET to T-SQL's ORDER BY + FETCH rules."""
            if expression.args.get("offset"):
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

                limit = expression.args.get("limit")
                if isinstance(limit, exp.Limit):
                    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                    # we replace here because otherwise TOP would be generated in select_sql
                    limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            """Generate CONVERT or TRY_CONVERT (when the node is marked safe)."""
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            """Generate one OPTION (...) entry; some options require '=' before the value."""
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            """Render LATERAL as CROSS APPLY / OUTER APPLY when marked, else warn."""
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            """Generate TIMEFROMPARTS, defaulting fractions/precision (required args in T-SQL)."""
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)
nano = expression.args.get("nano") 894 if nano is not None: 895 nano.pop() 896 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 897 898 if expression.args.get("fractions") is None: 899 expression.set("fractions", exp.Literal.number(0)) 900 if expression.args.get("precision") is None: 901 expression.set("precision", exp.Literal.number(0)) 902 903 return rename_func("TIMEFROMPARTS")(self, expression) 904 905 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 906 zone = expression.args.get("zone") 907 if zone is not None: 908 zone.pop() 909 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 910 911 nano = expression.args.get("nano") 912 if nano is not None: 913 nano.pop() 914 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 915 916 if expression.args.get("milli") is None: 917 expression.set("milli", exp.Literal.number(0)) 918 919 return rename_func("DATETIMEFROMPARTS")(self, expression) 920 921 def setitem_sql(self, expression: exp.SetItem) -> str: 922 this = expression.this 923 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 924 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 
        def boolean_sql(self, expression: exp.Boolean) -> str:
            """Render booleans as BIT literals (1/0) in value positions, else as predicates."""
            if type(expression.parent) in BIT_TYPES:
                return "1" if expression.this else "0"

            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            """IS TRUE/FALSE has no T-SQL form — compare against the BIT literal instead."""
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            """Render the created object's name, prefixing '#' for temporary tables."""
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                # Keep the '#' inside the brackets for quoted names.
                sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            """Generate CREATE, rewriting CTAS into SELECT ... INTO and emulating
            IF NOT EXISTS / OR REPLACE via information_schema checks and EXEC."""
            kind = expression.kind
            exists = expression.args.pop("exists", None)

            if kind == "VIEW":
                # CREATE VIEW cannot reference a database-qualified name.
                expression.this.set("catalog", None)

            sql = super().create_sql(expression)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            table = expression.find(exp.Table)

            # Convert CTAS statement to SELECT .. INTO ..
            if kind == "TABLE" and ctas_expression:
                ctas_with = ctas_expression.args.get("with")
                if ctas_with:
                    ctas_with = ctas_with.pop()

                if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                    ctas_expression = ctas_expression.subquery()

                select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
                select_into.set("into", exp.Into(this=table))
                select_into.set("with", ctas_with)

                if like_property:
                    # CREATE ... LIKE copies only the schema, so select zero rows.
                    select_into.limit(0, copy=False)

                sql = self.sql(select_into)

            if exists:
                # Emulate IF NOT EXISTS by probing the catalog and EXEC'ing the DDL string.
                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
                sql = self.sql(exp.Literal.string(sql))
                if kind == "SCHEMA":
                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql})"""
                elif kind == "TABLE":
                    assert table
                    where = exp.and_(
                        exp.column("table_name").eq(table.name),
                        exp.column("table_schema").eq(table.db) if table.db else None,
                        exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
                    )
                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql})"""
                elif kind == "INDEX":
                    index = self.sql(exp.Literal.string(expression.this.text("this")))
                    sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql})"""
            elif expression.args.get("replace"):
                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

            return self.prepend_ctes(expression, sql)
        def offset_sql(self, expression: exp.Offset) -> str:
            """OFFSET requires a trailing ROWS keyword in T-SQL."""
            return f"{super().offset_sql(expression)} ROWS"

        def version_sql(self, expression: exp.Version) -> str:
            """Generate FOR SYSTEM_TIME temporal clauses (AS OF / FROM..TO / BETWEEN..AND)."""
            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
            this = f"FOR {name}"
            expr = expression.expression
            kind = expression.text("kind")
            if kind in ("FROM", "BETWEEN"):
                # Two-bound forms use different separators: FROM x TO y, BETWEEN x AND y.
                args = expr.expressions
                sep = "TO" if kind == "FROM" else "AND"
                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
            else:
                expr_sql = self.sql(expr)

            expr_sql = f" {expr_sql}" if expr_sql else ""
            return f"{this} {kind}{expr_sql}"
expr.expressions 1018 sep = "TO" if kind == "FROM" else "AND" 1019 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1020 else: 1021 expr_sql = self.sql(expr) 1022 1023 expr_sql = f" {expr_sql}" if expr_sql else "" 1024 return f"{this} {kind}{expr_sql}" 1025 1026 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1027 table = expression.args.get("table") 1028 table = f"{table} " if table else "" 1029 return f"RETURNS {table}{self.sql(expression, 'this')}" 1030 1031 def returning_sql(self, expression: exp.Returning) -> str: 1032 into = self.sql(expression, "into") 1033 into = self.seg(f"INTO {into}") if into else "" 1034 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1035 1036 def transaction_sql(self, expression: exp.Transaction) -> str: 1037 this = self.sql(expression, "this") 1038 this = f" {this}" if this else "" 1039 mark = self.sql(expression, "mark") 1040 mark = f" WITH MARK {mark}" if mark else "" 1041 return f"BEGIN TRANSACTION{this}{mark}" 1042 1043 def commit_sql(self, expression: exp.Commit) -> str: 1044 this = self.sql(expression, "this") 1045 this = f" {this}" if this else "" 1046 durability = expression.args.get("durability") 1047 durability = ( 1048 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1049 if durability is not None 1050 else "" 1051 ) 1052 return f"COMMIT TRANSACTION{this}{durability}" 1053 1054 def rollback_sql(self, expression: exp.Rollback) -> str: 1055 this = self.sql(expression, "this") 1056 this = f" {this}" if this else "" 1057 return f"ROLLBACK TRANSACTION{this}" 1058 1059 def identifier_sql(self, expression: exp.Identifier) -> str: 1060 identifier = super().identifier_sql(expression) 1061 1062 if expression.args.get("global"): 1063 identifier = f"##{identifier}" 1064 elif expression.args.get("temporary"): 1065 identifier = f"#{identifier}" 1066 1067 return identifier 1068 1069 def constraint_sql(self, expression: exp.Constraint) -> 
str: 1070 this = self.sql(expression, "this") 1071 expressions = self.expressions(expression, flat=True, sep=" ") 1072 return f"CONSTRAINT {this} {expressions}" 1073 1074 def length_sql(self, expression: exp.Length) -> str: 1075 return self._uncast_text(expression, "LEN") 1076 1077 def right_sql(self, expression: exp.Right) -> str: 1078 return self._uncast_text(expression, "RIGHT") 1079 1080 def left_sql(self, expression: exp.Left) -> str: 1081 return self._uncast_text(expression, "LEFT") 1082 1083 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1084 this = expression.this 1085 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1086 this_sql = self.sql(this, "this") 1087 else: 1088 this_sql = self.sql(this) 1089 expression_sql = self.sql(expression, "expression") 1090 return self.func(name, this_sql, expression_sql if expression_sql else None) 1091 1092 def partition_sql(self, expression: exp.Partition) -> str: 1093 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1094 1095 def altertable_sql(self, expression: exp.AlterTable) -> str: 1096 action = seq_get(expression.args.get("actions") or [], 0) 1097 if isinstance(action, exp.RenameTable): 1098 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1099 return super().altertable_sql(expression) 1100 1101 def drop_sql(self, expression: exp.Drop) -> str: 1102 if expression.args["kind"] == "VIEW": 1103 expression.this.set("catalog", None) 1104 return super().drop_sql(expression) 1105 1106 def declare_sql(self, expression: exp.Declare) -> str: 1107 return f"DECLARE {self.expressions(expression, flat=True)}" 1108 1109 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1110 variable = self.sql(expression, "this") 1111 default = self.sql(expression, "default") 1112 default = f" = {default}" if default else "" 1113 1114 kind = self.sql(expression, "kind") 1115 if isinstance(expression.args.get("kind"), exp.Schema): 1116 kind 
= f"TABLE {kind}" 1117 1118 return f"{variable} AS {kind}{default}"
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    table_alias = expression.args.get("alias")

    needs_aliasing = (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(table_alias, exp.TableAlias)
        and not table_alias.columns
    )
    if not needs_aliasing:
        return expression

    from sqlglot.optimizer.qualify_columns import qualify_outputs

    query = expression.this

    # Record the *positions* of the unaliased column projections rather than the
    # nodes themselves: qualify_outputs replaces those nodes with new Alias
    # parents, so the original expressions would be unreachable afterwards.
    unaliased_indexes = (
        i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
    )

    qualify_outputs(query)

    # Carry each column's quoting flag over to its newly created Alias node.
    aliased_selects = query.selects
    for index in unaliased_indexes:
        aliased = aliased_selects[index]
        column = aliased.this
        if isinstance(column.this, exp.Identifier):
            aliased.args["alias"].set("quoted", column.this.quoted)

    return expression
Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
335class TSQL(Dialect): 336 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 337 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 338 SUPPORTS_SEMI_ANTI_JOIN = False 339 LOG_BASE_FIRST = False 340 TYPED_DIVISION = True 341 CONCAT_COALESCE = True 342 343 TIME_MAPPING = { 344 "year": "%Y", 345 "dayofyear": "%j", 346 "day": "%d", 347 "dy": "%d", 348 "y": "%Y", 349 "week": "%W", 350 "ww": "%W", 351 "wk": "%W", 352 "hour": "%h", 353 "hh": "%I", 354 "minute": "%M", 355 "mi": "%M", 356 "n": "%M", 357 "second": "%S", 358 "ss": "%S", 359 "s": "%-S", 360 "millisecond": "%f", 361 "ms": "%f", 362 "weekday": "%W", 363 "dw": "%W", 364 "month": "%m", 365 "mm": "%M", 366 "m": "%-M", 367 "Y": "%Y", 368 "YYYY": "%Y", 369 "YY": "%y", 370 "MMMM": "%B", 371 "MMM": "%b", 372 "MM": "%m", 373 "M": "%-m", 374 "dddd": "%A", 375 "dd": "%d", 376 "d": "%-d", 377 "HH": "%H", 378 "H": "%-H", 379 "h": "%-I", 380 "S": "%f", 381 "yyyy": "%Y", 382 "yy": "%y", 383 } 384 385 CONVERT_FORMAT_MAPPING = { 386 "0": "%b %d %Y %-I:%M%p", 387 "1": "%m/%d/%y", 388 "2": "%y.%m.%d", 389 "3": "%d/%m/%y", 390 "4": "%d.%m.%y", 391 "5": "%d-%m-%y", 392 "6": "%d %b %y", 393 "7": "%b %d, %y", 394 "8": "%H:%M:%S", 395 "9": "%b %d %Y %-I:%M:%S:%f%p", 396 "10": "mm-dd-yy", 397 "11": "yy/mm/dd", 398 "12": "yymmdd", 399 "13": "%d %b %Y %H:%M:ss:%f", 400 "14": "%H:%M:%S:%f", 401 "20": "%Y-%m-%d %H:%M:%S", 402 "21": "%Y-%m-%d %H:%M:%S.%f", 403 "22": "%m/%d/%y %-I:%M:%S %p", 404 "23": "%Y-%m-%d", 405 "24": "%H:%M:%S", 406 "25": "%Y-%m-%d %H:%M:%S.%f", 407 "100": "%b %d %Y %-I:%M%p", 408 "101": "%m/%d/%Y", 409 "102": "%Y.%m.%d", 410 "103": "%d/%m/%Y", 411 "104": "%d.%m.%Y", 412 "105": "%d-%m-%Y", 413 "106": "%d %b %Y", 414 "107": "%b %d, %Y", 415 "108": "%H:%M:%S", 416 "109": "%b %d %Y %-I:%M:%S:%f%p", 417 "110": "%m-%d-%Y", 418 "111": "%Y/%m/%d", 419 "112": "%Y%m%d", 420 "113": "%d %b %Y %H:%M:%S:%f", 421 "114": "%H:%M:%S:%f", 422 "120": "%Y-%m-%d %H:%M:%S", 423 "121": "%Y-%m-%d %H:%M:%S.%f", 424 } 425 426 
FORMAT_TIME_MAPPING = { 427 "y": "%B %Y", 428 "d": "%m/%d/%Y", 429 "H": "%-H", 430 "h": "%-I", 431 "s": "%Y-%m-%d %H:%M:%S", 432 "D": "%A,%B,%Y", 433 "f": "%A,%B,%Y %-I:%M %p", 434 "F": "%A,%B,%Y %-I:%M:%S %p", 435 "g": "%m/%d/%Y %-I:%M %p", 436 "G": "%m/%d/%Y %-I:%M:%S %p", 437 "M": "%B %-d", 438 "m": "%B %-d", 439 "O": "%Y-%m-%dT%H:%M:%S", 440 "u": "%Y-%M-%D %H:%M:%S%z", 441 "U": "%A, %B %D, %Y %H:%M:%S%z", 442 "T": "%-I:%M:%S %p", 443 "t": "%-I:%M", 444 "Y": "%a %Y", 445 } 446 447 class Tokenizer(tokens.Tokenizer): 448 IDENTIFIERS = [("[", "]"), '"'] 449 QUOTES = ["'", '"'] 450 HEX_STRINGS = [("0x", ""), ("0X", "")] 451 VAR_SINGLE_TOKENS = {"@", "$", "#"} 452 453 KEYWORDS = { 454 **tokens.Tokenizer.KEYWORDS, 455 "DATETIME2": TokenType.DATETIME, 456 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 457 "DECLARE": TokenType.DECLARE, 458 "EXEC": TokenType.COMMAND, 459 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 460 "IMAGE": TokenType.IMAGE, 461 "MONEY": TokenType.MONEY, 462 "NTEXT": TokenType.TEXT, 463 "OPTION": TokenType.OPTION, 464 "OUTPUT": TokenType.RETURNING, 465 "PRINT": TokenType.COMMAND, 466 "PROC": TokenType.PROCEDURE, 467 "REAL": TokenType.FLOAT, 468 "ROWVERSION": TokenType.ROWVERSION, 469 "SMALLDATETIME": TokenType.DATETIME, 470 "SMALLMONEY": TokenType.SMALLMONEY, 471 "SQL_VARIANT": TokenType.VARIANT, 472 "SYSTEM_USER": TokenType.CURRENT_USER, 473 "TOP": TokenType.TOP, 474 "TIMESTAMP": TokenType.ROWVERSION, 475 "TINYINT": TokenType.UTINYINT, 476 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 477 "UPDATE STATISTICS": TokenType.COMMAND, 478 "XML": TokenType.XML, 479 } 480 481 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 482 483 class Parser(parser.Parser): 484 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 485 LOG_DEFAULTS_TO_LN = True 486 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 487 STRING_ALIASES = True 488 NO_PAREN_IF_COMMANDS = False 489 490 QUERY_MODIFIER_PARSERS = { 491 **parser.Parser.QUERY_MODIFIER_PARSERS, 492 TokenType.OPTION: lambda 
self: ("options", self._parse_options()), 493 } 494 495 FUNCTIONS = { 496 **parser.Parser.FUNCTIONS, 497 "CHARINDEX": lambda args: exp.StrPosition( 498 this=seq_get(args, 1), 499 substr=seq_get(args, 0), 500 position=seq_get(args, 2), 501 ), 502 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 503 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 504 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 505 "DATEPART": _build_formatted_time(exp.TimeToStr), 506 "DATETIMEFROMPARTS": _build_datetimefromparts, 507 "EOMONTH": _build_eomonth, 508 "FORMAT": _build_format, 509 "GETDATE": exp.CurrentTimestamp.from_arg_list, 510 "HASHBYTES": _build_hashbytes, 511 "ISNULL": exp.Coalesce.from_arg_list, 512 "JSON_QUERY": parser.build_extract_json_with_path(exp.JSONExtract), 513 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 514 "LEN": _build_with_arg_as_text(exp.Length), 515 "LEFT": _build_with_arg_as_text(exp.Left), 516 "RIGHT": _build_with_arg_as_text(exp.Right), 517 "REPLICATE": exp.Repeat.from_arg_list, 518 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 519 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 520 "SUSER_NAME": exp.CurrentUser.from_arg_list, 521 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 522 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 523 "TIMEFROMPARTS": _build_timefromparts, 524 } 525 526 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 527 528 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 529 TokenType.TABLE, 530 *parser.Parser.TYPE_TOKENS, 531 } 532 533 STATEMENT_PARSERS = { 534 **parser.Parser.STATEMENT_PARSERS, 535 TokenType.DECLARE: lambda self: self._parse_declare(), 536 } 537 538 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 539 if not self._match(TokenType.OPTION): 540 return None 541 542 def _parse_option() -> t.Optional[exp.Expression]: 543 option = 
self._parse_var_from_options(OPTIONS) 544 if not option: 545 return None 546 547 self._match(TokenType.EQ) 548 return self.expression( 549 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 550 ) 551 552 return self._parse_wrapped_csv(_parse_option) 553 554 def _parse_projections(self) -> t.List[exp.Expression]: 555 """ 556 T-SQL supports the syntax alias = expression in the SELECT's projection list, 557 so we transform all parsed Selects to convert their EQ projections into Aliases. 558 559 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 560 """ 561 return [ 562 ( 563 exp.alias_(projection.expression, projection.this.this, copy=False) 564 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 565 else projection 566 ) 567 for projection in super()._parse_projections() 568 ] 569 570 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 571 """Applies to SQL Server and Azure SQL Database 572 COMMIT [ { TRAN | TRANSACTION } 573 [ transaction_name | @tran_name_variable ] ] 574 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 575 576 ROLLBACK { TRAN | TRANSACTION } 577 [ transaction_name | @tran_name_variable 578 | savepoint_name | @savepoint_variable ] 579 """ 580 rollback = self._prev.token_type == TokenType.ROLLBACK 581 582 self._match_texts(("TRAN", "TRANSACTION")) 583 this = self._parse_id_var() 584 585 if rollback: 586 return self.expression(exp.Rollback, this=this) 587 588 durability = None 589 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 590 self._match_text_seq("DELAYED_DURABILITY") 591 self._match(TokenType.EQ) 592 593 if self._match_text_seq("OFF"): 594 durability = False 595 else: 596 self._match(TokenType.ON) 597 durability = True 598 599 self._match_r_paren() 600 601 return self.expression(exp.Commit, this=this, durability=durability) 602 603 def _parse_transaction(self) -> exp.Transaction | exp.Command: 604 """Applies to SQL 
Server and Azure SQL Database 605 BEGIN { TRAN | TRANSACTION } 606 [ { transaction_name | @tran_name_variable } 607 [ WITH MARK [ 'description' ] ] 608 ] 609 """ 610 if self._match_texts(("TRAN", "TRANSACTION")): 611 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 612 if self._match_text_seq("WITH", "MARK"): 613 transaction.set("mark", self._parse_string()) 614 615 return transaction 616 617 return self._parse_as_command(self._prev) 618 619 def _parse_returns(self) -> exp.ReturnsProperty: 620 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 621 returns = super()._parse_returns() 622 returns.set("table", table) 623 return returns 624 625 def _parse_convert( 626 self, strict: bool, safe: t.Optional[bool] = None 627 ) -> t.Optional[exp.Expression]: 628 this = self._parse_types() 629 self._match(TokenType.COMMA) 630 args = [this, *self._parse_csv(self._parse_conjunction)] 631 convert = exp.Convert.from_arg_list(args) 632 convert.set("safe", safe) 633 convert.set("strict", strict) 634 return convert 635 636 def _parse_user_defined_function( 637 self, kind: t.Optional[TokenType] = None 638 ) -> t.Optional[exp.Expression]: 639 this = super()._parse_user_defined_function(kind=kind) 640 641 if ( 642 kind == TokenType.FUNCTION 643 or isinstance(this, exp.UserDefinedFunction) 644 or self._match(TokenType.ALIAS, advance=False) 645 ): 646 return this 647 648 expressions = self._parse_csv(self._parse_function_parameter) 649 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 650 651 def _parse_id_var( 652 self, 653 any_token: bool = True, 654 tokens: t.Optional[t.Collection[TokenType]] = None, 655 ) -> t.Optional[exp.Expression]: 656 is_temporary = self._match(TokenType.HASH) 657 is_global = is_temporary and self._match(TokenType.HASH) 658 659 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 660 if this: 661 if is_global: 662 this.set("global", True) 663 elif is_temporary: 664 
this.set("temporary", True) 665 666 return this 667 668 def _parse_create(self) -> exp.Create | exp.Command: 669 create = super()._parse_create() 670 671 if isinstance(create, exp.Create): 672 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 673 if isinstance(table, exp.Table) and table.this.args.get("temporary"): 674 if not create.args.get("properties"): 675 create.set("properties", exp.Properties(expressions=[])) 676 677 create.args["properties"].append("expressions", exp.TemporaryProperty()) 678 679 return create 680 681 def _parse_if(self) -> t.Optional[exp.Expression]: 682 index = self._index 683 684 if self._match_text_seq("OBJECT_ID"): 685 self._parse_wrapped_csv(self._parse_string) 686 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 687 return self._parse_drop(exists=True) 688 self._retreat(index) 689 690 return super()._parse_if() 691 692 def _parse_unique(self) -> exp.UniqueColumnConstraint: 693 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 694 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 695 else: 696 this = self._parse_schema(self._parse_id_var(any_token=False)) 697 698 return self.expression(exp.UniqueColumnConstraint, this=this) 699 700 def _parse_partition(self) -> t.Optional[exp.Partition]: 701 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 702 return None 703 704 def parse_range(): 705 low = self._parse_bitwise() 706 high = self._parse_bitwise() if self._match_text_seq("TO") else None 707 708 return ( 709 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 710 ) 711 712 partition = self.expression( 713 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 714 ) 715 716 self._match_r_paren() 717 718 return partition 719 720 def _parse_declare(self) -> exp.Declare | exp.Command: 721 index = self._index 722 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 723 724 if not expressions or 
self._curr: 725 self._retreat(index) 726 return self._parse_as_command(self._prev) 727 728 return self.expression(exp.Declare, expressions=expressions) 729 730 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 731 var = self._parse_id_var() 732 if not var: 733 return None 734 735 value = None 736 self._match(TokenType.ALIAS) 737 if self._match(TokenType.TABLE): 738 data_type = self._parse_schema() 739 else: 740 data_type = self._parse_types() 741 if self._match(TokenType.EQ): 742 value = self._parse_bitwise() 743 744 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 745 746 class Generator(generator.Generator): 747 LIMIT_IS_TOP = True 748 QUERY_HINTS = False 749 RETURNING_END = False 750 NVL2_SUPPORTED = False 751 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 752 LIMIT_FETCH = "FETCH" 753 COMPUTED_COLUMN_WITH_TYPE = False 754 CTE_RECURSIVE_KEYWORD_REQUIRED = False 755 ENSURE_BOOLS = True 756 NULL_ORDERING_SUPPORTED = None 757 SUPPORTS_SINGLE_ARG_CONCAT = False 758 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 759 SUPPORTS_SELECT_INTO = True 760 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 761 SUPPORTS_TO_NUMBER = False 762 OUTER_UNION_MODIFIERS = False 763 COPY_PARAMS_EQ_REQUIRED = True 764 765 EXPRESSIONS_WITHOUT_NESTED_CTES = { 766 exp.Delete, 767 exp.Insert, 768 exp.Merge, 769 exp.Select, 770 exp.Subquery, 771 exp.Union, 772 exp.Update, 773 } 774 775 SUPPORTED_JSON_PATH_PARTS = { 776 exp.JSONPathKey, 777 exp.JSONPathRoot, 778 exp.JSONPathSubscript, 779 } 780 781 TYPE_MAPPING = { 782 **generator.Generator.TYPE_MAPPING, 783 exp.DataType.Type.BOOLEAN: "BIT", 784 exp.DataType.Type.DECIMAL: "NUMERIC", 785 exp.DataType.Type.DATETIME: "DATETIME2", 786 exp.DataType.Type.DOUBLE: "FLOAT", 787 exp.DataType.Type.INT: "INTEGER", 788 exp.DataType.Type.ROWVERSION: "ROWVERSION", 789 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 790 exp.DataType.Type.TIMESTAMP: "DATETIME2", 791 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 792 
exp.DataType.Type.UTINYINT: "TINYINT", 793 exp.DataType.Type.VARIANT: "SQL_VARIANT", 794 } 795 796 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 797 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 798 799 TRANSFORMS = { 800 **generator.Generator.TRANSFORMS, 801 exp.AnyValue: any_value_to_max_sql, 802 exp.ArrayToString: rename_func("STRING_AGG"), 803 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 804 exp.DateAdd: date_delta_sql("DATEADD"), 805 exp.DateDiff: date_delta_sql("DATEDIFF"), 806 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 807 exp.CurrentDate: rename_func("GETDATE"), 808 exp.CurrentTimestamp: rename_func("GETDATE"), 809 exp.DateStrToDate: datestrtodate_sql, 810 exp.Extract: rename_func("DATEPART"), 811 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 812 exp.GroupConcat: _string_agg_sql, 813 exp.If: rename_func("IIF"), 814 exp.JSONExtract: _json_extract_sql, 815 exp.JSONExtractScalar: _json_extract_sql, 816 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 817 exp.Max: max_or_greatest, 818 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 819 exp.Min: min_or_least, 820 exp.NumberToStr: _format_sql, 821 exp.ParseJSON: lambda self, e: self.sql(e, "this"), 822 exp.Select: transforms.preprocess( 823 [ 824 transforms.eliminate_distinct_on, 825 transforms.eliminate_semi_and_anti_joins, 826 transforms.eliminate_qualify, 827 ] 828 ), 829 exp.StrPosition: lambda self, e: self.func( 830 "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position") 831 ), 832 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 833 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 834 exp.SHA2: lambda self, e: self.func( 835 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 836 ), 837 exp.TemporaryProperty: lambda self, e: "", 838 exp.TimeStrToTime: timestrtotime_sql, 839 exp.TimeToStr: _format_sql, 840 
exp.Trim: trim_sql, 841 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 842 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 843 } 844 845 TRANSFORMS.pop(exp.ReturnsProperty) 846 847 PROPERTIES_LOCATION = { 848 **generator.Generator.PROPERTIES_LOCATION, 849 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 850 } 851 852 def select_sql(self, expression: exp.Select) -> str: 853 if expression.args.get("offset"): 854 if not expression.args.get("order"): 855 # ORDER BY is required in order to use OFFSET in a query, so we use 856 # a noop order by, since we don't really care about the order. 857 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 858 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 859 860 limit = expression.args.get("limit") 861 if isinstance(limit, exp.Limit): 862 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 863 # we replace here because otherwise TOP would be generated in select_sql 864 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 865 866 return super().select_sql(expression) 867 868 def convert_sql(self, expression: exp.Convert) -> str: 869 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 870 return self.func( 871 name, expression.this, expression.expression, expression.args.get("style") 872 ) 873 874 def queryoption_sql(self, expression: exp.QueryOption) -> str: 875 option = self.sql(expression, "this") 876 value = self.sql(expression, "expression") 877 if value: 878 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 879 return f"{option} {optional_equal_sign}{value}" 880 return option 881 882 def lateral_op(self, expression: exp.Lateral) -> str: 883 cross_apply = expression.args.get("cross_apply") 884 if cross_apply is True: 885 return "CROSS APPLY" 886 if cross_apply is False: 887 return "OUTER APPLY" 888 889 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 890 
self.unsupported("LATERAL clause is not supported.") 891 return "LATERAL" 892 893 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 894 nano = expression.args.get("nano") 895 if nano is not None: 896 nano.pop() 897 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 898 899 if expression.args.get("fractions") is None: 900 expression.set("fractions", exp.Literal.number(0)) 901 if expression.args.get("precision") is None: 902 expression.set("precision", exp.Literal.number(0)) 903 904 return rename_func("TIMEFROMPARTS")(self, expression) 905 906 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 907 zone = expression.args.get("zone") 908 if zone is not None: 909 zone.pop() 910 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 911 912 nano = expression.args.get("nano") 913 if nano is not None: 914 nano.pop() 915 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 916 917 if expression.args.get("milli") is None: 918 expression.set("milli", exp.Literal.number(0)) 919 920 return rename_func("DATETIMEFROMPARTS")(self, expression) 921 922 def setitem_sql(self, expression: exp.SetItem) -> str: 923 this = expression.this 924 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 925 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 
926 return f"{self.sql(this.left)} {self.sql(this.right)}" 927 928 return super().setitem_sql(expression) 929 930 def boolean_sql(self, expression: exp.Boolean) -> str: 931 if type(expression.parent) in BIT_TYPES: 932 return "1" if expression.this else "0" 933 934 return "(1 = 1)" if expression.this else "(1 = 0)" 935 936 def is_sql(self, expression: exp.Is) -> str: 937 if isinstance(expression.expression, exp.Boolean): 938 return self.binary(expression, "=") 939 return self.binary(expression, "IS") 940 941 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 942 sql = self.sql(expression, "this") 943 properties = expression.args.get("properties") 944 945 if sql[:1] != "#" and any( 946 isinstance(prop, exp.TemporaryProperty) 947 for prop in (properties.expressions if properties else []) 948 ): 949 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 950 951 return sql 952 953 def create_sql(self, expression: exp.Create) -> str: 954 kind = expression.kind 955 exists = expression.args.pop("exists", None) 956 957 if kind == "VIEW": 958 expression.this.set("catalog", None) 959 960 sql = super().create_sql(expression) 961 962 like_property = expression.find(exp.LikeProperty) 963 if like_property: 964 ctas_expression = like_property.this 965 else: 966 ctas_expression = expression.expression 967 968 table = expression.find(exp.Table) 969 970 # Convert CTAS statement to SELECT .. INTO .. 
971 if kind == "TABLE" and ctas_expression: 972 ctas_with = ctas_expression.args.get("with") 973 if ctas_with: 974 ctas_with = ctas_with.pop() 975 976 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 977 ctas_expression = ctas_expression.subquery() 978 979 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 980 select_into.set("into", exp.Into(this=table)) 981 select_into.set("with", ctas_with) 982 983 if like_property: 984 select_into.limit(0, copy=False) 985 986 sql = self.sql(select_into) 987 988 if exists: 989 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 990 sql = self.sql(exp.Literal.string(sql)) 991 if kind == "SCHEMA": 992 sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql})""" 993 elif kind == "TABLE": 994 assert table 995 where = exp.and_( 996 exp.column("table_name").eq(table.name), 997 exp.column("table_schema").eq(table.db) if table.db else None, 998 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 999 ) 1000 sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql})""" 1001 elif kind == "INDEX": 1002 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1003 sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql})""" 1004 elif expression.args.get("replace"): 1005 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1006 1007 return self.prepend_ctes(expression, sql) 1008 1009 def offset_sql(self, expression: exp.Offset) -> str: 1010 return f"{super().offset_sql(expression)} ROWS" 1011 1012 def version_sql(self, expression: exp.Version) -> str: 1013 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1014 this = f"FOR {name}" 1015 expr = expression.expression 1016 kind = expression.text("kind") 1017 if kind in ("FROM", "BETWEEN"): 1018 args = 
expr.expressions 1019 sep = "TO" if kind == "FROM" else "AND" 1020 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1021 else: 1022 expr_sql = self.sql(expr) 1023 1024 expr_sql = f" {expr_sql}" if expr_sql else "" 1025 return f"{this} {kind}{expr_sql}" 1026 1027 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1028 table = expression.args.get("table") 1029 table = f"{table} " if table else "" 1030 return f"RETURNS {table}{self.sql(expression, 'this')}" 1031 1032 def returning_sql(self, expression: exp.Returning) -> str: 1033 into = self.sql(expression, "into") 1034 into = self.seg(f"INTO {into}") if into else "" 1035 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1036 1037 def transaction_sql(self, expression: exp.Transaction) -> str: 1038 this = self.sql(expression, "this") 1039 this = f" {this}" if this else "" 1040 mark = self.sql(expression, "mark") 1041 mark = f" WITH MARK {mark}" if mark else "" 1042 return f"BEGIN TRANSACTION{this}{mark}" 1043 1044 def commit_sql(self, expression: exp.Commit) -> str: 1045 this = self.sql(expression, "this") 1046 this = f" {this}" if this else "" 1047 durability = expression.args.get("durability") 1048 durability = ( 1049 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1050 if durability is not None 1051 else "" 1052 ) 1053 return f"COMMIT TRANSACTION{this}{durability}" 1054 1055 def rollback_sql(self, expression: exp.Rollback) -> str: 1056 this = self.sql(expression, "this") 1057 this = f" {this}" if this else "" 1058 return f"ROLLBACK TRANSACTION{this}" 1059 1060 def identifier_sql(self, expression: exp.Identifier) -> str: 1061 identifier = super().identifier_sql(expression) 1062 1063 if expression.args.get("global"): 1064 identifier = f"##{identifier}" 1065 elif expression.args.get("temporary"): 1066 identifier = f"#{identifier}" 1067 1068 return identifier 1069 1070 def constraint_sql(self, expression: exp.Constraint) -> 
str: 1071 this = self.sql(expression, "this") 1072 expressions = self.expressions(expression, flat=True, sep=" ") 1073 return f"CONSTRAINT {this} {expressions}" 1074 1075 def length_sql(self, expression: exp.Length) -> str: 1076 return self._uncast_text(expression, "LEN") 1077 1078 def right_sql(self, expression: exp.Right) -> str: 1079 return self._uncast_text(expression, "RIGHT") 1080 1081 def left_sql(self, expression: exp.Left) -> str: 1082 return self._uncast_text(expression, "LEFT") 1083 1084 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1085 this = expression.this 1086 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1087 this_sql = self.sql(this, "this") 1088 else: 1089 this_sql = self.sql(this) 1090 expression_sql = self.sql(expression, "expression") 1091 return self.func(name, this_sql, expression_sql if expression_sql else None) 1092 1093 def partition_sql(self, expression: exp.Partition) -> str: 1094 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1095 1096 def altertable_sql(self, expression: exp.AlterTable) -> str: 1097 action = seq_get(expression.args.get("actions") or [], 0) 1098 if isinstance(action, exp.RenameTable): 1099 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1100 return super().altertable_sql(expression) 1101 1102 def drop_sql(self, expression: exp.Drop) -> str: 1103 if expression.args["kind"] == "VIEW": 1104 expression.this.set("catalog", None) 1105 return super().drop_sql(expression) 1106 1107 def declare_sql(self, expression: exp.Declare) -> str: 1108 return f"DECLARE {self.expressions(expression, flat=True)}" 1109 1110 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1111 variable = self.sql(expression, "this") 1112 default = self.sql(expression, "default") 1113 default = f" = {default}" if default else "" 1114 1115 kind = self.sql(expression, "kind") 1116 if isinstance(expression.args.get("kind"), exp.Schema): 1117 kind 
= f"TABLE {kind}" 1118 1119 return f"{variable} AS {kind}{default}"
Specifies the strategy according to which identifiers should be normalized.
Whether the base comes first in the LOG function. Possible values: True, False, None (two arguments are not supported by LOG).
Whether the behavior of `a / b` depends on the types of `a` and `b`. False means `a / b` is always float division. True means `a / b` is integer division if both `a` and `b` are integers.
A NULL arg in CONCAT yields NULL by default, but in some dialects it yields an empty string.
Associates this dialect's time formats with their equivalent Python strftime
formats.
Inherited Members
- sqlglot.dialects.dialect.Dialect
- Dialect
- INDEX_OFFSET
- WEEK_OFFSET
- UNNEST_COLUMN_ONLY
- ALIAS_POST_TABLESAMPLE
- TABLESAMPLE_SIZE_IS_PERCENT
- IDENTIFIERS_CAN_START_WITH_DIGIT
- DPIPE_IS_STRING_CONCAT
- STRICT_STRING_CONCAT
- SUPPORTS_USER_DEFINED_TYPES
- NORMALIZE_FUNCTIONS
- NULL_ORDERING
- SAFE_DIVISION
- HEX_LOWERCASE
- DATE_FORMAT
- DATEINT_FORMAT
- FORMAT_MAPPING
- UNESCAPED_SEQUENCES
- PSEUDOCOLUMNS
- PREFER_CTE_ALIAS_COLUMN
- COPY_PARAMS_ARE_CSV
- get_or_raise
- format_time
- normalize_identifier
- case_sensitive
- can_identify
- quote_identifier
- to_json_path
- parse
- parse_into
- generate
- transpile
- tokenize
- tokenizer
- parser
- generator
447 class Tokenizer(tokens.Tokenizer): 448 IDENTIFIERS = [("[", "]"), '"'] 449 QUOTES = ["'", '"'] 450 HEX_STRINGS = [("0x", ""), ("0X", "")] 451 VAR_SINGLE_TOKENS = {"@", "$", "#"} 452 453 KEYWORDS = { 454 **tokens.Tokenizer.KEYWORDS, 455 "DATETIME2": TokenType.DATETIME, 456 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 457 "DECLARE": TokenType.DECLARE, 458 "EXEC": TokenType.COMMAND, 459 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 460 "IMAGE": TokenType.IMAGE, 461 "MONEY": TokenType.MONEY, 462 "NTEXT": TokenType.TEXT, 463 "OPTION": TokenType.OPTION, 464 "OUTPUT": TokenType.RETURNING, 465 "PRINT": TokenType.COMMAND, 466 "PROC": TokenType.PROCEDURE, 467 "REAL": TokenType.FLOAT, 468 "ROWVERSION": TokenType.ROWVERSION, 469 "SMALLDATETIME": TokenType.DATETIME, 470 "SMALLMONEY": TokenType.SMALLMONEY, 471 "SQL_VARIANT": TokenType.VARIANT, 472 "SYSTEM_USER": TokenType.CURRENT_USER, 473 "TOP": TokenType.TOP, 474 "TIMESTAMP": TokenType.ROWVERSION, 475 "TINYINT": TokenType.UTINYINT, 476 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 477 "UPDATE STATISTICS": TokenType.COMMAND, 478 "XML": TokenType.XML, 479 } 480 481 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- IDENTIFIER_ESCAPES
- STRING_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
class Parser(parser.Parser):
    """T-SQL statement parser.

    Extends the base parser with T-SQL specifics: OPTION(...) query hints,
    ``alias = expression`` projections, TRAN/TRANSACTION statements, CONVERT,
    temp-table (#/##) identifiers, and DECLARE statements.
    """

    # SET does not require a delimiter between name and value.
    SET_REQUIRES_ASSIGNMENT_DELIMITER = False
    # Single-argument LOG is the natural logarithm.
    LOG_DEFAULTS_TO_LN = True
    ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
    # String literals can serve as aliases.
    STRING_ALIASES = True
    NO_PAREN_IF_COMMANDS = False

    QUERY_MODIFIER_PARSERS = {
        **parser.Parser.QUERY_MODIFIER_PARSERS,
        # OPTION(...) hints attach to the parsed query under "options".
        TokenType.OPTION: lambda self: ("options", self._parse_options()),
    }

    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,
        # CHARINDEX(substr, string[, position]) -> StrPosition(string, substr, position);
        # note the first two arguments are swapped relative to the AST node.
        "CHARINDEX": lambda args: exp.StrPosition(
            this=seq_get(args, 1),
            substr=seq_get(args, 0),
            position=seq_get(args, 2),
        ),
        "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
        # NOTE(review): DATEDIFF uses a module-local builder (presumably defined
        # earlier in this file) instead of the shared build_date_delta -- confirm.
        "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
        "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
        "DATEPART": _build_formatted_time(exp.TimeToStr),
        "DATETIMEFROMPARTS": _build_datetimefromparts,
        "EOMONTH": _build_eomonth,
        "FORMAT": _build_format,
        "GETDATE": exp.CurrentTimestamp.from_arg_list,
        "HASHBYTES": _build_hashbytes,
        "ISNULL": exp.Coalesce.from_arg_list,
        "JSON_QUERY": parser.build_extract_json_with_path(exp.JSONExtract),
        "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
        "LEN": _build_with_arg_as_text(exp.Length),
        "LEFT": _build_with_arg_as_text(exp.Left),
        "RIGHT": _build_with_arg_as_text(exp.Right),
        "REPLICATE": exp.Repeat.from_arg_list,
        # SQUARE(x) -> POWER(x, 2)
        "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
        "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
        "SUSER_NAME": exp.CurrentUser.from_arg_list,
        "SUSER_SNAME": exp.CurrentUser.from_arg_list,
        "SYSTEM_USER": exp.CurrentUser.from_arg_list,
        "TIMEFROMPARTS": _build_timefromparts,
    }

    JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

    # Tokens that may name the table variable in RETURNS @tbl TABLE (...).
    RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
        TokenType.TABLE,
        *parser.Parser.TYPE_TOKENS,
    }

    STATEMENT_PARSERS = {
        **parser.Parser.STATEMENT_PARSERS,
        TokenType.DECLARE: lambda self: self._parse_declare(),
    }

    def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
        """Parse an OPTION(<hint> [, ...]) clause into QueryOption expressions."""
        if not self._match(TokenType.OPTION):
            return None

        def _parse_option() -> t.Optional[exp.Expression]:
            option = self._parse_var_from_options(OPTIONS)
            if not option:
                return None

            # The '=' between an option and its value is optional here.
            self._match(TokenType.EQ)
            return self.expression(
                exp.QueryOption, this=option, expression=self._parse_primary_or_var()
            )

        return self._parse_wrapped_csv(_parse_option)

    def _parse_projections(self) -> t.List[exp.Expression]:
        """
        T-SQL supports the syntax alias = expression in the SELECT's projection list,
        so we transform all parsed Selects to convert their EQ projections into Aliases.

        See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
        """
        return [
            (
                exp.alias_(projection.expression, projection.this.this, copy=False)
                if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                else projection
            )
            for projection in super()._parse_projections()
        ]

    def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
        """Applies to SQL Server and Azure SQL Database
        COMMIT [ { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable ] ]
            [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

        ROLLBACK { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable
            | savepoint_name | @savepoint_variable ]
        """
        # The COMMIT/ROLLBACK keyword was already consumed by the caller.
        rollback = self._prev.token_type == TokenType.ROLLBACK

        self._match_texts(("TRAN", "TRANSACTION"))
        this = self._parse_id_var()

        if rollback:
            return self.expression(exp.Rollback, this=this)

        durability = None
        if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
            self._match_text_seq("DELAYED_DURABILITY")
            self._match(TokenType.EQ)

            if self._match_text_seq("OFF"):
                durability = False
            else:
                self._match(TokenType.ON)
                durability = True

            self._match_r_paren()

        return self.expression(exp.Commit, this=this, durability=durability)

    def _parse_transaction(self) -> exp.Transaction | exp.Command:
        """Applies to SQL Server and Azure SQL Database
        BEGIN { TRAN | TRANSACTION }
        [ { transaction_name | @tran_name_variable }
        [ WITH MARK [ 'description' ] ]
        ]
        """
        if self._match_texts(("TRAN", "TRANSACTION")):
            transaction = self.expression(exp.Transaction, this=self._parse_id_var())
            if self._match_text_seq("WITH", "MARK"):
                transaction.set("mark", self._parse_string())

            return transaction

        # Not a transaction start; treat BEGIN as an opaque command.
        return self._parse_as_command(self._prev)

    def _parse_returns(self) -> exp.ReturnsProperty:
        """Parse RETURNS, capturing an optional leading table-variable name."""
        table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
        returns = super()._parse_returns()
        returns.set("table", table)
        return returns

    def _parse_convert(
        self, strict: bool, safe: t.Optional[bool] = None
    ) -> t.Optional[exp.Expression]:
        """Parse CONVERT(<type>, <expr>[, <style>]) / TRY_CONVERT arguments."""
        this = self._parse_types()
        self._match(TokenType.COMMA)
        args = [this, *self._parse_csv(self._parse_conjunction)]
        convert = exp.Convert.from_arg_list(args)
        convert.set("safe", safe)
        convert.set("strict", strict)
        return convert

    def _parse_user_defined_function(
        self, kind: t.Optional[TokenType] = None
    ) -> t.Optional[exp.Expression]:
        """Parse a UDF, allowing T-SQL's unparenthesized parameter lists for procedures."""
        this = super()._parse_user_defined_function(kind=kind)

        if (
            kind == TokenType.FUNCTION
            or isinstance(this, exp.UserDefinedFunction)
            or self._match(TokenType.ALIAS, advance=False)
        ):
            return this

        # CREATE PROC p @a INT, @b INT ... -- parameters without parentheses.
        expressions = self._parse_csv(self._parse_function_parameter)
        return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

    def _parse_id_var(
        self,
        any_token: bool = True,
        tokens: t.Optional[t.Collection[TokenType]] = None,
    ) -> t.Optional[exp.Expression]:
        """Parse an identifier, flagging #local and ##global temp-object names."""
        is_temporary = self._match(TokenType.HASH)
        is_global = is_temporary and self._match(TokenType.HASH)

        this = super()._parse_id_var(any_token=any_token, tokens=tokens)
        if this:
            if is_global:
                this.set("global", True)
            elif is_temporary:
                this.set("temporary", True)

        return this

    def _parse_create(self) -> exp.Create | exp.Command:
        """Parse CREATE, promoting a #temp table name to a TemporaryProperty."""
        create = super()._parse_create()

        if isinstance(create, exp.Create):
            table = create.this.this if isinstance(create.this, exp.Schema) else create.this
            if isinstance(table, exp.Table) and table.this.args.get("temporary"):
                if not create.args.get("properties"):
                    create.set("properties", exp.Properties(expressions=[]))

                create.args["properties"].append("expressions", exp.TemporaryProperty())

        return create

    def _parse_if(self) -> t.Optional[exp.Expression]:
        """Recognize IF OBJECT_ID(...) IS NOT NULL DROP ... as DROP IF EXISTS."""
        index = self._index

        if self._match_text_seq("OBJECT_ID"):
            self._parse_wrapped_csv(self._parse_string)
            if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                return self._parse_drop(exists=True)
        # Not the idiom; rewind and parse a regular IF.
        self._retreat(index)

        return super()._parse_if()

    def _parse_unique(self) -> exp.UniqueColumnConstraint:
        """Parse UNIQUE [CLUSTERED | NONCLUSTERED] (...) column constraints."""
        if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
            this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
        else:
            this = self._parse_schema(self._parse_id_var(any_token=False))

        return self.expression(exp.UniqueColumnConstraint, this=this)

    def _parse_partition(self) -> t.Optional[exp.Partition]:
        """Parse WITH (PARTITIONS (<n> [TO <m>] [, ...])) into a Partition node."""
        if not self._match_text_seq("WITH", "(", "PARTITIONS"):
            return None

        def parse_range():
            low = self._parse_bitwise()
            high = self._parse_bitwise() if self._match_text_seq("TO") else None

            return (
                self.expression(exp.PartitionRange, this=low, expression=high) if high else low
            )

        partition = self.expression(
            exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
        )

        self._match_r_paren()

        return partition

    def _parse_declare(self) -> exp.Declare | exp.Command:
        """Parse DECLARE; fall back to an opaque Command if it can't be fully parsed."""
        index = self._index
        expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem))

        # Leftover tokens mean unsupported DECLARE syntax -- rewind and punt.
        if not expressions or self._curr:
            self._retreat(index)
            return self._parse_as_command(self._prev)

        return self.expression(exp.Declare, expressions=expressions)

    def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
        """Parse one DECLARE entry: @var [AS] <type> [= <default>] or a TABLE schema."""
        var = self._parse_id_var()
        if not var:
            return None

        value = None
        self._match(TokenType.ALIAS)  # optional AS
        if self._match(TokenType.TABLE):
            data_type = self._parse_schema()
        else:
            data_type = self._parse_types()
            if self._match(TokenType.EQ):
                value = self._parse_bitwise()

        return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ID_VAR_TOKENS
- INTERVAL_VARS
- ALIAS_TOKENS
- COMMENT_TABLE_ALIAS_TOKENS
- UPDATE_ALIAS_TOKENS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- COLUMN_OPERATORS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- RANGE_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- NO_PAREN_FUNCTION_PARSERS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- FUNCTION_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTER
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- NULL_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_UNION
- UNION_MODIFIERS
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_JSON_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- errors
- sql
class Generator(generator.Generator):
    """T-SQL SQL generator.

    Renders sqlglot ASTs as T-SQL: TOP/FETCH limits, OUTPUT clauses,
    #temp-table naming, CTAS -> SELECT INTO conversion, IF NOT EXISTS
    emulation via EXEC, and T-SQL function/type spellings.
    """

    # LIMIT renders as TOP; OFFSET pagination uses OFFSET ... FETCH.
    LIMIT_IS_TOP = True
    QUERY_HINTS = False
    RETURNING_END = False
    NVL2_SUPPORTED = False
    ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
    LIMIT_FETCH = "FETCH"
    COMPUTED_COLUMN_WITH_TYPE = False
    CTE_RECURSIVE_KEYWORD_REQUIRED = False
    # T-SQL has no boolean literals in most contexts; force 1/0 comparisons.
    ENSURE_BOOLS = True
    NULL_ORDERING_SUPPORTED = None
    SUPPORTS_SINGLE_ARG_CONCAT = False
    TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
    SUPPORTS_SELECT_INTO = True
    JSON_PATH_BRACKETED_KEY_SUPPORTED = False
    SUPPORTS_TO_NUMBER = False
    OUTER_UNION_MODIFIERS = False
    COPY_PARAMS_EQ_REQUIRED = True

    # These statements cannot carry nested WITH clauses in T-SQL.
    EXPRESSIONS_WITHOUT_NESTED_CTES = {
        exp.Delete,
        exp.Insert,
        exp.Merge,
        exp.Select,
        exp.Subquery,
        exp.Union,
        exp.Update,
    }

    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BOOLEAN: "BIT",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DATETIME: "DATETIME2",
        exp.DataType.Type.DOUBLE: "FLOAT",
        exp.DataType.Type.INT: "INTEGER",
        exp.DataType.Type.ROWVERSION: "ROWVERSION",
        exp.DataType.Type.TEXT: "VARCHAR(MAX)",
        exp.DataType.Type.TIMESTAMP: "DATETIME2",
        exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
        exp.DataType.Type.UTINYINT: "TINYINT",
        exp.DataType.Type.VARIANT: "SQL_VARIANT",
    }

    # NCHAR/NVARCHAR pass through unchanged in T-SQL.
    TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
    TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.AnyValue: any_value_to_max_sql,
        exp.ArrayToString: rename_func("STRING_AGG"),
        exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.DateDiff: date_delta_sql("DATEDIFF"),
        exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
        exp.CurrentDate: rename_func("GETDATE"),
        exp.CurrentTimestamp: rename_func("GETDATE"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.Extract: rename_func("DATEPART"),
        exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
        exp.GroupConcat: _string_agg_sql,
        exp.If: rename_func("IIF"),
        exp.JSONExtract: _json_extract_sql,
        exp.JSONExtractScalar: _json_extract_sql,
        exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
        exp.Max: max_or_greatest,
        exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
        exp.Min: min_or_least,
        exp.NumberToStr: _format_sql,
        # T-SQL treats JSON as plain strings, so PARSE_JSON is a no-op.
        exp.ParseJSON: lambda self, e: self.sql(e, "this"),
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_distinct_on,
                transforms.eliminate_semi_and_anti_joins,
                transforms.eliminate_qualify,
            ]
        ),
        exp.StrPosition: lambda self, e: self.func(
            "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position")
        ),
        exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
        exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
        exp.SHA2: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
        ),
        # Temporariness is carried by the '#' name prefix, not a keyword.
        exp.TemporaryProperty: lambda self, e: "",
        exp.TimeStrToTime: timestrtotime_sql,
        exp.TimeToStr: _format_sql,
        exp.Trim: trim_sql,
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
    }

    # RETURNS is handled by returnsproperty_sql below.
    TRANSFORMS.pop(exp.ReturnsProperty)

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    def select_sql(self, expression: exp.Select) -> str:
        """Render SELECT, adapting OFFSET pagination to T-SQL's requirements."""
        if expression.args.get("offset"):
            if not expression.args.get("order"):
                # ORDER BY is required in order to use OFFSET in a query, so we use
                # a noop order by, since we don't really care about the order.
                # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            limit = expression.args.get("limit")
            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                # we replace here because otherwise TOP would be generated in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

        return super().select_sql(expression)

    def convert_sql(self, expression: exp.Convert) -> str:
        """Render CONVERT / TRY_CONVERT (when the 'safe' flag is set)."""
        name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
        return self.func(
            name, expression.this, expression.expression, expression.args.get("style")
        )

    def queryoption_sql(self, expression: exp.QueryOption) -> str:
        """Render one OPTION(...) hint, adding '=' only where T-SQL requires it."""
        option = self.sql(expression, "this")
        value = self.sql(expression, "expression")
        if value:
            optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
            return f"{option} {optional_equal_sign}{value}"
        return option

    def lateral_op(self, expression: exp.Lateral) -> str:
        """Map LATERAL joins to CROSS APPLY / OUTER APPLY."""
        cross_apply = expression.args.get("cross_apply")
        if cross_apply is True:
            return "CROSS APPLY"
        if cross_apply is False:
            return "OUTER APPLY"

        # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
        self.unsupported("LATERAL clause is not supported.")
        return "LATERAL"

    def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
        """Render TIMEFROMPARTS, dropping nanoseconds and defaulting fractions/precision to 0."""
        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

        if expression.args.get("fractions") is None:
            expression.set("fractions", exp.Literal.number(0))
        if expression.args.get("precision") is None:
            expression.set("precision", exp.Literal.number(0))

        return rename_func("TIMEFROMPARTS")(self, expression)

    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
        """Render DATETIMEFROMPARTS, dropping zone/nano args and defaulting milli to 0."""
        zone = expression.args.get("zone")
        if zone is not None:
            zone.pop()
            self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

        if expression.args.get("milli") is None:
            expression.set("milli", exp.Literal.number(0))

        return rename_func("DATETIMEFROMPARTS")(self, expression)

    def setitem_sql(self, expression: exp.SetItem) -> str:
        this = expression.this
        if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
            # T-SQL does not use '=' in SET command, except when the LHS is a variable.
            return f"{self.sql(this.left)} {self.sql(this.right)}"

        return super().setitem_sql(expression)

    def boolean_sql(self, expression: exp.Boolean) -> str:
        """Render booleans as BIT literals or (1 = 1)/(1 = 0) predicates by context."""
        if type(expression.parent) in BIT_TYPES:
            return "1" if expression.this else "0"

        return "(1 = 1)" if expression.this else "(1 = 0)"

    def is_sql(self, expression: exp.Is) -> str:
        """Render IS; comparisons against booleans use '=' since BIT has no IS."""
        if isinstance(expression.expression, exp.Boolean):
            return self.binary(expression, "=")
        return self.binary(expression, "IS")

    def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
        """Prefix the created object's name with '#' when it is temporary."""
        sql = self.sql(expression, "this")
        properties = expression.args.get("properties")

        if sql[:1] != "#" and any(
            isinstance(prop, exp.TemporaryProperty)
            for prop in (properties.expressions if properties else [])
        ):
            # Keep the '#' inside the opening bracket for quoted identifiers.
            sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

        return sql

    def create_sql(self, expression: exp.Create) -> str:
        """Render CREATE, converting CTAS to SELECT INTO and emulating IF NOT EXISTS."""
        kind = expression.kind
        exists = expression.args.pop("exists", None)

        if kind == "VIEW":
            # Views cannot be catalog-qualified in T-SQL.
            expression.this.set("catalog", None)

        sql = super().create_sql(expression)

        like_property = expression.find(exp.LikeProperty)
        if like_property:
            ctas_expression = like_property.this
        else:
            ctas_expression = expression.expression

        table = expression.find(exp.Table)

        # Convert CTAS statement to SELECT .. INTO ..
        if kind == "TABLE" and ctas_expression:
            ctas_with = ctas_expression.args.get("with")
            if ctas_with:
                ctas_with = ctas_with.pop()

            if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                ctas_expression = ctas_expression.subquery()

            select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
            select_into.set("into", exp.Into(this=table))
            select_into.set("with", ctas_with)

            if like_property:
                # LIKE copies structure only, so emit a zero-row SELECT INTO.
                select_into.limit(0, copy=False)

            sql = self.sql(select_into)

        if exists:
            # Emulate IF NOT EXISTS by probing catalog views and EXEC'ing the DDL.
            identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
            sql = self.sql(exp.Literal.string(sql))
            if kind == "SCHEMA":
                sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql})"""
            elif kind == "TABLE":
                assert table
                where = exp.and_(
                    exp.column("table_name").eq(table.name),
                    exp.column("table_schema").eq(table.db) if table.db else None,
                    exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
                )
                sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql})"""
            elif kind == "INDEX":
                index = self.sql(exp.Literal.string(expression.this.text("this")))
                sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql})"""
        elif expression.args.get("replace"):
            sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

        return self.prepend_ctes(expression, sql)

    def offset_sql(self, expression: exp.Offset) -> str:
        """OFFSET requires a trailing ROWS keyword in T-SQL."""
        return f"{super().offset_sql(expression)} ROWS"

    def version_sql(self, expression: exp.Version) -> str:
        """Render FOR SYSTEM_TIME temporal-table clauses."""
        name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
        this = f"FOR {name}"
        expr = expression.expression
        kind = expression.text("kind")
        if kind in ("FROM", "BETWEEN"):
            args = expr.expressions
            sep = "TO" if kind == "FROM" else "AND"
            expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
        else:
            expr_sql = self.sql(expr)

        expr_sql = f" {expr_sql}" if expr_sql else ""
        return f"{this} {kind}{expr_sql}"

    def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
        """Render RETURNS [@table] <type> for function definitions."""
        table = expression.args.get("table")
        table = f"{table} " if table else ""
        return f"RETURNS {table}{self.sql(expression, 'this')}"

    def returning_sql(self, expression: exp.Returning) -> str:
        """RETURNING maps to T-SQL's OUTPUT [... INTO target] clause."""
        into = self.sql(expression, "into")
        into = self.seg(f"INTO {into}") if into else ""
        return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

    def transaction_sql(self, expression: exp.Transaction) -> str:
        """Render BEGIN TRANSACTION [name] [WITH MARK 'desc']."""
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        mark = self.sql(expression, "mark")
        mark = f" WITH MARK {mark}" if mark else ""
        return f"BEGIN TRANSACTION{this}{mark}"

    def commit_sql(self, expression: exp.Commit) -> str:
        """Render COMMIT TRANSACTION with optional DELAYED_DURABILITY."""
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        durability = expression.args.get("durability")
        durability = (
            f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
            if durability is not None
            else ""
        )
        return f"COMMIT TRANSACTION{this}{durability}"

    def rollback_sql(self, expression: exp.Rollback) -> str:
        """Render ROLLBACK TRANSACTION [name]."""
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"ROLLBACK TRANSACTION{this}"

    def identifier_sql(self, expression: exp.Identifier) -> str:
        """Prefix identifiers with ## (global temp) or # (local temp) flags."""
        identifier = super().identifier_sql(expression)

        if expression.args.get("global"):
            identifier = f"##{identifier}"
        elif expression.args.get("temporary"):
            identifier = f"#{identifier}"

        return identifier

    def constraint_sql(self, expression: exp.Constraint) -> str:
        """Render CONSTRAINT with space-separated (not comma) sub-constraints."""
        this = self.sql(expression, "this")
        expressions = self.expressions(expression, flat=True, sep=" ")
        return f"CONSTRAINT {this} {expressions}"

    def length_sql(self, expression: exp.Length) -> str:
        """LENGTH maps to LEN; an inner CAST-to-TEXT is stripped (see _uncast_text)."""
        return self._uncast_text(expression, "LEN")

    def right_sql(self, expression: exp.Right) -> str:
        return self._uncast_text(expression, "RIGHT")

    def left_sql(self, expression: exp.Left) -> str:
        return self._uncast_text(expression, "LEFT")

    def _uncast_text(self, expression: exp.Expression, name: str) -> str:
        """Render `name(...)`, dropping a redundant CAST(x AS TEXT) around the argument."""
        this = expression.this
        if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
            this_sql = self.sql(this, "this")
        else:
            this_sql = self.sql(this)
        expression_sql = self.sql(expression, "expression")
        return self.func(name, this_sql, expression_sql if expression_sql else None)

    def partition_sql(self, expression: exp.Partition) -> str:
        """Render WITH (PARTITIONS(...)) for partitioned operations."""
        return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

    def altertable_sql(self, expression: exp.AlterTable) -> str:
        """Render ALTER TABLE; RENAME becomes an EXEC sp_rename call."""
        action = seq_get(expression.args.get("actions") or [], 0)
        if isinstance(action, exp.RenameTable):
            return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
        return super().altertable_sql(expression)

    def drop_sql(self, expression: exp.Drop) -> str:
        """Render DROP; view names cannot be catalog-qualified in T-SQL."""
        if expression.args["kind"] == "VIEW":
            expression.this.set("catalog", None)
        return super().drop_sql(expression)

    def declare_sql(self, expression: exp.Declare) -> str:
        """Render DECLARE with its comma-separated items."""
        return f"DECLARE {self.expressions(expression, flat=True)}"

    def declareitem_sql(self, expression: exp.DeclareItem) -> str:
        """Render one DECLARE item: @var AS <type|TABLE (...)> [= default]."""
        variable = self.sql(expression, "this")
        default = self.sql(expression, "default")
        default = f" = {default}" if default else ""

        kind = self.sql(expression, "kind")
        if isinstance(expression.args.get("kind"), exp.Schema):
            # A Schema kind means a table variable: DECLARE @t AS TABLE (...).
            kind = f"TABLE {kind}"

        return f"{variable} AS {kind}{default}"
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
- normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
852 def select_sql(self, expression: exp.Select) -> str: 853 if expression.args.get("offset"): 854 if not expression.args.get("order"): 855 # ORDER BY is required in order to use OFFSET in a query, so we use 856 # a noop order by, since we don't really care about the order. 857 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 858 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 859 860 limit = expression.args.get("limit") 861 if isinstance(limit, exp.Limit): 862 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 863 # we replace here because otherwise TOP would be generated in select_sql 864 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 865 866 return super().select_sql(expression)
874 def queryoption_sql(self, expression: exp.QueryOption) -> str: 875 option = self.sql(expression, "this") 876 value = self.sql(expression, "expression") 877 if value: 878 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 879 return f"{option} {optional_equal_sign}{value}" 880 return option
882 def lateral_op(self, expression: exp.Lateral) -> str: 883 cross_apply = expression.args.get("cross_apply") 884 if cross_apply is True: 885 return "CROSS APPLY" 886 if cross_apply is False: 887 return "OUTER APPLY" 888 889 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 890 self.unsupported("LATERAL clause is not supported.") 891 return "LATERAL"
893 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 894 nano = expression.args.get("nano") 895 if nano is not None: 896 nano.pop() 897 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 898 899 if expression.args.get("fractions") is None: 900 expression.set("fractions", exp.Literal.number(0)) 901 if expression.args.get("precision") is None: 902 expression.set("precision", exp.Literal.number(0)) 903 904 return rename_func("TIMEFROMPARTS")(self, expression)
906 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 907 zone = expression.args.get("zone") 908 if zone is not None: 909 zone.pop() 910 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 911 912 nano = expression.args.get("nano") 913 if nano is not None: 914 nano.pop() 915 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 916 917 if expression.args.get("milli") is None: 918 expression.set("milli", exp.Literal.number(0)) 919 920 return rename_func("DATETIMEFROMPARTS")(self, expression)
922 def setitem_sql(self, expression: exp.SetItem) -> str: 923 this = expression.this 924 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 925 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 926 return f"{self.sql(this.left)} {self.sql(this.right)}" 927 928 return super().setitem_sql(expression)
941 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 942 sql = self.sql(expression, "this") 943 properties = expression.args.get("properties") 944 945 if sql[:1] != "#" and any( 946 isinstance(prop, exp.TemporaryProperty) 947 for prop in (properties.expressions if properties else []) 948 ): 949 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 950 951 return sql
def create_sql(self, expression: exp.Create) -> str:
    """Generate a CREATE statement with T-SQL-specific rewrites.

    Handles several constructs T-SQL lacks direct syntax for:
    - CTAS becomes ``SELECT * INTO target FROM (query) AS temp``.
    - ``CREATE TABLE ... LIKE`` becomes the same SELECT INTO with
      ``LIMIT 0`` so only the schema is copied.
    - ``IF NOT EXISTS`` is emulated by wrapping the statement in an
      ``IF NOT EXISTS (...) EXEC('...')`` guard against the catalog views.
    - ``CREATE OR REPLACE`` becomes ``CREATE OR ALTER``.
    """
    kind = expression.kind
    # Pop "exists" BEFORE delegating to the base generator so it doesn't
    # try to render IF NOT EXISTS syntax that T-SQL doesn't support.
    exists = expression.args.pop("exists", None)

    if kind == "VIEW":
        # T-SQL does not allow a catalog (database) qualifier on CREATE VIEW.
        expression.this.set("catalog", None)

    sql = super().create_sql(expression)

    like_property = expression.find(exp.LikeProperty)
    if like_property:
        # CREATE TABLE ... LIKE src: select from the source table instead.
        ctas_expression = like_property.this
    else:
        ctas_expression = expression.expression

    table = expression.find(exp.Table)

    # Convert CTAS statement to SELECT .. INTO ..
    if kind == "TABLE" and ctas_expression:
        # Detach any WITH clause from the query so it can be re-attached to
        # the outer SELECT INTO statement (CTEs must precede SELECT).
        ctas_with = ctas_expression.args.get("with")
        if ctas_with:
            ctas_with = ctas_with.pop()

        if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
            # Bare SELECT/UNION must be wrapped to appear in a FROM clause.
            ctas_expression = ctas_expression.subquery()

        select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
        select_into.set("into", exp.Into(this=table))
        select_into.set("with", ctas_with)

        if like_property:
            # LIKE copies only the schema: LIMIT 0 yields an empty result set.
            select_into.limit(0, copy=False)

        sql = self.sql(select_into)

    if exists:
        # Emulate IF NOT EXISTS: probe the catalog and EXEC the statement
        # as a string only when the object is absent.
        identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
        sql = self.sql(exp.Literal.string(sql))
        if kind == "SCHEMA":
            sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql})"""
        elif kind == "TABLE":
            assert table
            where = exp.and_(
                exp.column("table_name").eq(table.name),
                exp.column("table_schema").eq(table.db) if table.db else None,
                exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
            )
            sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql})"""
        elif kind == "INDEX":
            index = self.sql(exp.Literal.string(expression.this.text("this")))
            sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql})"""
    elif expression.args.get("replace"):
        # T-SQL spells CREATE OR REPLACE as CREATE OR ALTER.
        sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

    return self.prepend_ctes(expression, sql)
1012 def version_sql(self, expression: exp.Version) -> str: 1013 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1014 this = f"FOR {name}" 1015 expr = expression.expression 1016 kind = expression.text("kind") 1017 if kind in ("FROM", "BETWEEN"): 1018 args = expr.expressions 1019 sep = "TO" if kind == "FROM" else "AND" 1020 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1021 else: 1022 expr_sql = self.sql(expr) 1023 1024 expr_sql = f" {expr_sql}" if expr_sql else "" 1025 return f"{this} {kind}{expr_sql}"
1044 def commit_sql(self, expression: exp.Commit) -> str: 1045 this = self.sql(expression, "this") 1046 this = f" {this}" if this else "" 1047 durability = expression.args.get("durability") 1048 durability = ( 1049 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1050 if durability is not None 1051 else "" 1052 ) 1053 return f"COMMIT TRANSACTION{this}{durability}"
1060 def identifier_sql(self, expression: exp.Identifier) -> str: 1061 identifier = super().identifier_sql(expression) 1062 1063 if expression.args.get("global"): 1064 identifier = f"##{identifier}" 1065 elif expression.args.get("temporary"): 1066 identifier = f"#{identifier}" 1067 1068 return identifier
1096 def altertable_sql(self, expression: exp.AlterTable) -> str: 1097 action = seq_get(expression.args.get("actions") or [], 0) 1098 if isinstance(action, exp.RenameTable): 1099 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1100 return super().altertable_sql(expression)
1110 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1111 variable = self.sql(expression, "this") 1112 default = self.sql(expression, "default") 1113 default = f" = {default}" if default else "" 1114 1115 kind = self.sql(expression, "kind") 1116 if isinstance(expression.args.get("kind"), exp.Schema): 1117 kind = f"TABLE {kind}" 1118 1119 return f"{variable} AS {kind}{default}"
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- EXPLICIT_UNION
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- COLUMN_JOIN_MARKS_SUPPORTED
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- transformcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- except_sql
- except_op
- fetch_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- intersect_sql
- intersect_op
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- set_operations
- union_sql
- union_op
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- currenttimestamp_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterdiststyle_sql
- altersortkey_sql
- renametable_sql
- renamecolumn_sql
- alterset_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- dpipe_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- try_sql
- log_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- forin_sql
- refresh_sql
- operator_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- generateseries_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql