sqlglot.dialects.tsql
1from __future__ import annotations 2 3import datetime 4import re 5import typing as t 6from functools import reduce 7 8from sqlglot import exp, generator, parser, tokens, transforms 9from sqlglot.dialects.dialect import ( 10 Dialect, 11 NormalizationStrategy, 12 any_value_to_max_sql, 13 build_date_delta, 14 date_delta_sql, 15 datestrtodate_sql, 16 generatedasidentitycolumnconstraint_sql, 17 max_or_greatest, 18 min_or_least, 19 rename_func, 20 strposition_sql, 21 timestrtotime_sql, 22 trim_sql, 23 map_date_part, 24) 25from sqlglot.helper import seq_get 26from sqlglot.parser import build_coalesce 27from sqlglot.time import format_time 28from sqlglot.tokens import TokenType 29from sqlglot.typing.tsql import EXPRESSION_METADATA 30 31if t.TYPE_CHECKING: 32 from sqlglot._typing import E 33 34FULL_FORMAT_TIME_MAPPING = { 35 "weekday": "%A", 36 "dw": "%A", 37 "w": "%A", 38 "month": "%B", 39 "mm": "%B", 40 "m": "%B", 41} 42 43DATE_DELTA_INTERVAL = { 44 "year": "year", 45 "yyyy": "year", 46 "yy": "year", 47 "quarter": "quarter", 48 "qq": "quarter", 49 "q": "quarter", 50 "month": "month", 51 "mm": "month", 52 "m": "month", 53 "week": "week", 54 "ww": "week", 55 "wk": "week", 56 "day": "day", 57 "dd": "day", 58 "d": "day", 59} 60 61DATE_PART_UNMAPPING = { 62 "WEEKISO": "ISO_WEEK", 63 "DAYOFWEEK": "WEEKDAY", 64 "TIMEZONE_MINUTE": "TZOFFSET", 65} 66 67DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})") 68 69# N = Numeric, C=Currency 70TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"} 71 72DEFAULT_START_DATE = datetime.date(1900, 1, 1) 73 74BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias} 75 76# Unsupported options: 77# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] ) 78# - TABLE HINT 79OPTIONS: parser.OPTIONS_TYPE = { 80 **dict.fromkeys( 81 ( 82 "DISABLE_OPTIMIZED_PLAN_FORCING", 83 "FAST", 84 "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX", 85 "LABEL", 86 "MAXDOP", 87 "MAXRECURSION", 88 "MAX_GRANT_PERCENT", 89 "MIN_GRANT_PERCENT", 90 "NO_PERFORMANCE_SPOOL", 91 "QUERYTRACEON", 92 "RECOMPILE", 93 ), 94 tuple(), 95 ), 96 "CONCAT": ("UNION",), 97 "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"), 98 "EXPAND": ("VIEWS",), 99 "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"), 100 "HASH": ("GROUP", "JOIN", "UNION"), 101 "KEEP": ("PLAN",), 102 "KEEPFIXED": ("PLAN",), 103 "LOOP": ("JOIN",), 104 "MERGE": ("JOIN", "UNION"), 105 "OPTIMIZE": (("FOR", "UNKNOWN"),), 106 "ORDER": ("GROUP",), 107 "PARAMETERIZATION": ("FORCED", "SIMPLE"), 108 "ROBUST": ("PLAN",), 109 "USE": ("PLAN",), 110} 111 112 113XML_OPTIONS: parser.OPTIONS_TYPE = { 114 **dict.fromkeys( 115 ( 116 "AUTO", 117 "EXPLICIT", 118 "TYPE", 119 ), 120 tuple(), 121 ), 122 "ELEMENTS": ( 123 "XSINIL", 124 "ABSENT", 125 ), 126 "BINARY": ("BASE64",), 127} 128 129 130OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL") 131 132 133def _build_formatted_time( 134 exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None 135) -> t.Callable[[t.List], E]: 136 def _builder(args: t.List) -> E: 137 fmt = seq_get(args, 0) 138 if isinstance(fmt, exp.Expression): 139 fmt = exp.Literal.string( 140 format_time( 141 fmt.name.lower(), 142 ( 143 {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING} 144 if full_format_mapping 145 else TSQL.TIME_MAPPING 146 ), 147 ) 148 ) 149 150 this = seq_get(args, 1) 151 if isinstance(this, exp.Expression): 152 this = exp.cast(this, exp.DataType.Type.DATETIME2) 153 154 return exp_class(this=this, format=fmt) 155 156 return _builder 157 158 159def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr: 160 this = seq_get(args, 0) 161 fmt = seq_get(args, 1) 162 culture = seq_get(args, 2) 163 164 number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name)) 165 166 if number_fmt: 167 return exp.NumberToStr(this=this, format=fmt, culture=culture) 168 169 if fmt: 170 fmt = exp.Literal.string( 171 format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING) 172 if len(fmt.name) == 1 173 else format_time(fmt.name, TSQL.TIME_MAPPING) 174 ) 175 176 return exp.TimeToStr(this=this, format=fmt, culture=culture) 177 178 179def _build_eomonth(args: t.List) -> exp.LastDay: 180 date = exp.TsOrDsToDate(this=seq_get(args, 0)) 181 month_lag = seq_get(args, 1) 182 183 if month_lag is None: 184 this: exp.Expression = date 185 else: 186 unit = DATE_DELTA_INTERVAL.get("month") 187 this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit)) 188 189 return exp.LastDay(this=this) 190 191 192def _build_hashbytes(args: t.List) -> exp.Expression: 193 kind, data = args 194 kind = kind.name.upper() if kind.is_string else "" 195 196 if kind == "MD5": 197 args.pop(0) 198 return exp.MD5(this=data) 199 if kind in ("SHA", "SHA1"): 200 args.pop(0) 201 return exp.SHA(this=data) 202 if kind == "SHA2_256": 203 return exp.SHA2(this=data, length=exp.Literal.number(256)) 204 if kind == "SHA2_512": 205 return exp.SHA2(this=data, length=exp.Literal.number(512)) 206 207 return exp.func("HASHBYTES", *args) 208 209 210def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str: 211 fmt = expression.args["format"] 212 213 if not isinstance(expression, exp.NumberToStr): 214 if fmt.is_string: 215 mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING) 216 fmt_sql = self.sql(exp.Literal.string(mapped_fmt)) 217 else: 218 fmt_sql = self.format_time(expression) or self.sql(fmt) 219 else: 220 fmt_sql = self.sql(fmt) 221 222 return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture")) 223 224 225def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str: 226 this = expression.this 227 distinct = expression.find(exp.Distinct) 228 if distinct: 229 # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression 230 self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.") 231 this = distinct.pop().expressions[0] 232 233 order = "" 234 if isinstance(expression.this, exp.Order): 235 if expression.this.this: 236 this = expression.this.this.pop() 237 # Order has a leading space 238 order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})" 239 240 separator = expression.args.get("separator") or exp.Literal.string(",") 241 return f"STRING_AGG({self.format_args(this, separator)}){order}" 242 243 244def _build_date_delta( 245 exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None, big_int: bool = False 246) -> t.Callable[[t.List], E]: 247 def _builder(args: t.List) -> E: 248 unit = seq_get(args, 0) 249 if unit and unit_mapping: 250 unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name)) 251 252 start_date = seq_get(args, 1) 253 if start_date and start_date.is_number: 254 # Numeric types are valid DATETIME values 255 if start_date.is_int: 256 adds = DEFAULT_START_DATE + datetime.timedelta(days=start_date.to_py()) 257 start_date = exp.Literal.string(adds.strftime("%F")) 258 else: 259 # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs. 260 # This is not a problem when generating T-SQL code, it is when transpiling to other dialects. 261 return exp_class( 262 this=seq_get(args, 2), expression=start_date, unit=unit, big_int=big_int 263 ) 264 265 return exp_class( 266 this=exp.TimeStrToTime(this=seq_get(args, 2)), 267 expression=exp.TimeStrToTime(this=start_date), 268 unit=unit, 269 big_int=big_int, 270 ) 271 272 return _builder 273 274 275def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression: 276 """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.""" 277 alias = expression.args.get("alias") 278 279 if ( 280 isinstance(expression, (exp.CTE, exp.Subquery)) 281 and isinstance(alias, exp.TableAlias) 282 and not alias.columns 283 ): 284 from sqlglot.optimizer.qualify_columns import qualify_outputs 285 286 # We keep track of the unaliased column projection indexes instead of the expressions 287 # themselves, because the latter are going to be replaced by new nodes when the aliases 288 # are added and hence we won't be able to reach these newly added Alias parents 289 query = expression.this 290 unaliased_column_indexes = ( 291 i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias 292 ) 293 294 qualify_outputs(query) 295 296 # Preserve the quoting information of columns for newly added Alias nodes 297 query_selects = query.selects 298 for select_index in unaliased_column_indexes: 299 alias = query_selects[select_index] 300 column = alias.this 301 if isinstance(column.this, exp.Identifier): 302 alias.args["alias"].set("quoted", column.this.quoted) 303 304 return expression 305 306 307# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax 308def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts: 309 return exp.TimestampFromParts( 310 year=seq_get(args, 0), 311 month=seq_get(args, 1), 312 day=seq_get(args, 2), 313 hour=seq_get(args, 3), 314 min=seq_get(args, 4), 315 sec=seq_get(args, 5), 316 milli=seq_get(args, 6), 317 ) 318 319 320# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax 321def _build_timefromparts(args: t.List) -> exp.TimeFromParts: 322 return exp.TimeFromParts( 323 hour=seq_get(args, 0), 324 min=seq_get(args, 1), 325 sec=seq_get(args, 2), 326 fractions=seq_get(args, 3), 327 precision=seq_get(args, 4), 328 ) 329 330 331def _build_with_arg_as_text( 332 klass: t.Type[exp.Expression], 333) -> t.Callable[[t.List[exp.Expression]], exp.Expression]: 334 def _parse(args: t.List[exp.Expression]) -> exp.Expression: 335 this = seq_get(args, 0) 336 337 if this and not this.is_string: 338 this = exp.cast(this, exp.DataType.Type.TEXT) 339 340 expression = seq_get(args, 1) 341 kwargs = {"this": this} 342 343 if expression: 344 kwargs["expression"] = expression 345 346 return klass(**kwargs) 347 348 return _parse 349 350 351# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16 352def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous: 353 # PARSENAME(...) will be stored into exp.SplitPart if: 354 # - All args are literals 355 # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL) 356 if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args): 357 this = args[0] 358 part_index = args[1] 359 split_count = len(this.name.split(".")) 360 if split_count <= 4: 361 return exp.SplitPart( 362 this=this, 363 delimiter=exp.Literal.string("."), 364 part_index=exp.Literal.number(split_count + 1 - part_index.to_py()), 365 ) 366 367 return exp.Anonymous(this="PARSENAME", expressions=args) 368 369 370def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract: 371 if len(args) == 1: 372 # The default value for path is '$'. As a result, if you don't provide a 373 # value for path, JSON_QUERY returns the input expression. 374 args.append(exp.Literal.string("$")) 375 376 return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect) 377 378 379def _json_extract_sql( 380 self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar 381) -> str: 382 json_query = self.func("JSON_QUERY", expression.this, expression.expression) 383 json_value = self.func("JSON_VALUE", expression.this, expression.expression) 384 return self.func("ISNULL", json_query, json_value) 385 386 387def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime): 388 sql = timestrtotime_sql(self, expression) 389 if expression.args.get("zone"): 390 # If there is a timezone, produce an expression like: 391 # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC' 392 # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information 393 return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC"))) 394 return sql 395 396 397def _build_datetrunc(args: t.List) -> exp.TimestampTrunc: 398 unit = seq_get(args, 0) 399 this = seq_get(args, 1) 400 401 if this and this.is_string: 402 this = exp.cast(this, exp.DataType.Type.DATETIME2) 403 404 return exp.TimestampTrunc(this=this, unit=unit) 405 406 407class TSQL(Dialect): 408 SUPPORTS_SEMI_ANTI_JOIN = False 409 LOG_BASE_FIRST = False 410 TYPED_DIVISION = True 411 CONCAT_COALESCE = True 412 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 413 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 414 415 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 416 417 EXPRESSION_METADATA = EXPRESSION_METADATA.copy() 418 419 DATE_PART_MAPPING = { 420 **Dialect.DATE_PART_MAPPING, 421 "QQ": "QUARTER", 422 "M": "MONTH", 423 "Y": "DAYOFYEAR", 424 "WW": "WEEK", 425 "N": "MINUTE", 426 "SS": "SECOND", 427 "MCS": "MICROSECOND", 428 "TZOFFSET": "TIMEZONE_MINUTE", 429 "TZ": "TIMEZONE_MINUTE", 430 "ISO_WEEK": "WEEKISO", 431 "ISOWK": "WEEKISO", 432 "ISOWW": "WEEKISO", 433 } 434 435 TIME_MAPPING = { 436 "year": "%Y", 437 "dayofyear": "%j", 438 "day": "%d", 439 "dy": "%d", 440 "y": "%Y", 441 "week": "%W", 442 "ww": "%W", 443 "wk": "%W", 444 "isowk": "%V", 445 "isoww": "%V", 446 "iso_week": "%V", 447 "hour": "%h", 448 "hh": "%I", 449 "minute": "%M", 450 "mi": "%M", 451 "n": "%M", 452 "second": "%S", 453 "ss": "%S", 454 "s": "%-S", 455 "millisecond": "%f", 456 "ms": "%f", 457 "weekday": "%w", 458 "dw": "%w", 459 "month": "%m", 460 "mm": "%M", 461 "m": "%-M", 462 "Y": "%Y", 463 "YYYY": "%Y", 464 "YY": "%y", 465 "MMMM": "%B", 466 "MMM": "%b", 467 "MM": "%m", 468 "M": "%-m", 469 "dddd": "%A", 470 "dd": "%d", 471 "d": "%-d", 472 "HH": "%H", 473 "H": "%-H", 474 "h": "%-I", 475 "ffffff": "%f", 476 "yyyy": "%Y", 477 "yy": "%y", 478 } 479 480 CONVERT_FORMAT_MAPPING = { 481 "0": "%b %d %Y %-I:%M%p", 482 "1": "%m/%d/%y", 483 "2": "%y.%m.%d", 484 "3": "%d/%m/%y", 485 "4": "%d.%m.%y", 486 "5": "%d-%m-%y", 487 "6": "%d %b %y", 488 "7": "%b %d, %y", 489 "8": "%H:%M:%S", 490 "9": "%b %d %Y %-I:%M:%S:%f%p", 491 "10": "mm-dd-yy", 492 "11": "yy/mm/dd", 493 "12": "yymmdd", 494 "13": "%d %b %Y %H:%M:ss:%f", 495 "14": "%H:%M:%S:%f", 496 "20": "%Y-%m-%d %H:%M:%S", 497 "21": "%Y-%m-%d %H:%M:%S.%f", 498 "22": "%m/%d/%y %-I:%M:%S %p", 499 "23": "%Y-%m-%d", 500 "24": "%H:%M:%S", 501 "25": "%Y-%m-%d %H:%M:%S.%f", 502 "100": "%b %d %Y %-I:%M%p", 503 "101": "%m/%d/%Y", 504 "102": "%Y.%m.%d", 505 "103": "%d/%m/%Y", 506 "104": "%d.%m.%Y", 507 "105": "%d-%m-%Y", 508 "106": "%d %b %Y", 509 "107": "%b %d, %Y", 510 "108": "%H:%M:%S", 511 "109": "%b %d %Y %-I:%M:%S:%f%p", 512 "110": "%m-%d-%Y", 513 "111": "%Y/%m/%d", 514 "112": "%Y%m%d", 515 "113": "%d %b %Y %H:%M:%S:%f", 516 "114": "%H:%M:%S:%f", 517 "120": "%Y-%m-%d %H:%M:%S", 518 "121": "%Y-%m-%d %H:%M:%S.%f", 519 "126": "%Y-%m-%dT%H:%M:%S.%f", 520 } 521 522 FORMAT_TIME_MAPPING = { 523 "y": "%B %Y", 524 "d": "%m/%d/%Y", 525 "H": "%-H", 526 "h": "%-I", 527 "s": "%Y-%m-%d %H:%M:%S", 528 "D": "%A,%B,%Y", 529 "f": "%A,%B,%Y %-I:%M %p", 530 "F": "%A,%B,%Y %-I:%M:%S %p", 531 "g": "%m/%d/%Y %-I:%M %p", 532 "G": "%m/%d/%Y %-I:%M:%S %p", 533 "M": "%B %-d", 534 "m": "%B %-d", 535 "O": "%Y-%m-%dT%H:%M:%S", 536 "u": "%Y-%M-%D %H:%M:%S%z", 537 "U": "%A, %B %D, %Y %H:%M:%S%z", 538 "T": "%-I:%M:%S %p", 539 "t": "%-I:%M", 540 "Y": "%a %Y", 541 } 542 543 class Tokenizer(tokens.Tokenizer): 544 IDENTIFIERS = [("[", "]"), '"'] 545 QUOTES = ["'", '"'] 546 HEX_STRINGS = [("0x", ""), ("0X", "")] 547 VAR_SINGLE_TOKENS = {"@", "$", "#"} 548 549 KEYWORDS = { 550 **tokens.Tokenizer.KEYWORDS, 551 "CLUSTERED INDEX": TokenType.INDEX, 552 "DATETIME2": TokenType.DATETIME2, 553 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 554 "DECLARE": TokenType.DECLARE, 555 "EXEC": TokenType.EXECUTE, 556 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 557 "GO": TokenType.COMMAND, 558 "IMAGE": TokenType.IMAGE, 559 "MONEY": TokenType.MONEY, 560 "NONCLUSTERED INDEX": TokenType.INDEX, 561 "NTEXT": TokenType.TEXT, 562 "OPTION": TokenType.OPTION, 563 "OUTPUT": TokenType.RETURNING, 564 "PRINT": TokenType.COMMAND, 565 "PROC": TokenType.PROCEDURE, 566 "REAL": TokenType.FLOAT, 567 "ROWVERSION": TokenType.ROWVERSION, 568 "SMALLDATETIME": TokenType.SMALLDATETIME, 569 "SMALLMONEY": TokenType.SMALLMONEY, 570 "SQL_VARIANT": TokenType.VARIANT, 571 "SYSTEM_USER": TokenType.CURRENT_USER, 572 "TOP": TokenType.TOP, 573 "TIMESTAMP": TokenType.ROWVERSION, 574 "TINYINT": TokenType.UTINYINT, 575 "UNIQUEIDENTIFIER": TokenType.UUID, 576 "UPDATE STATISTICS": TokenType.COMMAND, 577 "XML": TokenType.XML, 578 } 579 KEYWORDS.pop("/*+") 580 581 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} - {TokenType.EXECUTE} 582 583 class Parser(parser.Parser): 584 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 585 LOG_DEFAULTS_TO_LN = True 586 STRING_ALIASES = True 587 NO_PAREN_IF_COMMANDS = False 588 589 QUERY_MODIFIER_PARSERS = { 590 **parser.Parser.QUERY_MODIFIER_PARSERS, 591 TokenType.OPTION: lambda self: ("options", self._parse_options()), 592 TokenType.FOR: lambda self: ("for_", self._parse_for()), 593 } 594 595 # T-SQL does not allow BEGIN to be used as an identifier 596 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 597 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 598 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 599 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 600 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 601 602 FUNCTIONS = { 603 **parser.Parser.FUNCTIONS, 604 "ATN2": exp.Atan2.from_arg_list, 605 "CHARINDEX": lambda args: exp.StrPosition( 606 this=seq_get(args, 1), 607 substr=seq_get(args, 0), 608 position=seq_get(args, 2), 609 ), 610 "COUNT": lambda args: exp.Count( 611 this=seq_get(args, 0), expressions=args[1:], big_int=False 612 ), 613 "COUNT_BIG": lambda args: exp.Count( 614 this=seq_get(args, 0), expressions=args[1:], big_int=True 615 ), 616 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 617 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 618 "DATEDIFF_BIG": _build_date_delta( 619 exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL, big_int=True 620 ), 621 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 622 "DATETIMEFROMPARTS": _build_datetimefromparts, 623 "EOMONTH": _build_eomonth, 624 "FORMAT": _build_format, 625 "GETDATE": exp.CurrentTimestamp.from_arg_list, 626 "HASHBYTES": _build_hashbytes, 627 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 628 "JSON_QUERY": _build_json_query, 629 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 630 "LEN": _build_with_arg_as_text(exp.Length), 631 "LEFT": _build_with_arg_as_text(exp.Left), 632 "NEWID": exp.Uuid.from_arg_list, 633 "RIGHT": _build_with_arg_as_text(exp.Right), 634 "PARSENAME": _build_parsename, 635 "REPLICATE": exp.Repeat.from_arg_list, 636 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 637 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 638 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 639 "SUSER_NAME": exp.CurrentUser.from_arg_list, 640 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 641 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 642 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 643 "TIMEFROMPARTS": _build_timefromparts, 644 "DATETRUNC": _build_datetrunc, 645 } 646 647 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 648 649 PROCEDURE_OPTIONS = dict.fromkeys( 650 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 651 ) 652 653 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 654 655 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 656 TokenType.TABLE, 657 *parser.Parser.TYPE_TOKENS, 658 } 659 660 STATEMENT_PARSERS = { 661 **parser.Parser.STATEMENT_PARSERS, 662 TokenType.DECLARE: lambda self: self._parse_declare(), 663 TokenType.EXECUTE: lambda self: self._parse_execute(), 664 } 665 666 RANGE_PARSERS = { 667 **parser.Parser.RANGE_PARSERS, 668 TokenType.DCOLON: lambda self, this: self.expression( 669 exp.ScopeResolution, 670 this=this, 671 expression=self._parse_function() or self._parse_var(any_token=True), 672 ), 673 } 674 675 NO_PAREN_FUNCTION_PARSERS = { 676 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 677 "NEXT": lambda self: self._parse_next_value_for(), 678 } 679 680 FUNCTION_PARSERS: t.Dict[str, t.Callable] = { 681 **parser.Parser.FUNCTION_PARSERS, 682 "JSON_ARRAYAGG": lambda self: self.expression( 683 exp.JSONArrayAgg, 684 this=self._parse_bitwise(), 685 order=self._parse_order(), 686 null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"), 687 ), 688 "DATEPART": lambda self: self._parse_datepart(), 689 } 690 691 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 692 COLUMN_OPERATORS = { 693 **parser.Parser.COLUMN_OPERATORS, 694 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 695 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 696 else self.expression(exp.ScopeResolution, this=this, expression=to), 697 } 698 699 SET_OP_MODIFIERS = {"offset"} 700 701 ODBC_DATETIME_LITERALS = { 702 "d": exp.Date, 703 "t": exp.Time, 704 "ts": exp.Timestamp, 705 } 706 707 def _parse_execute(self) -> exp.Execute: 708 execute = self.expression( 709 exp.Execute, 710 this=self._parse_table(schema=True), 711 expressions=self._parse_csv(self._parse_expression), 712 ) 713 714 if execute.name.lower() == "sp_executesql": 715 execute = self.expression(exp.ExecuteSql, **execute.args) 716 717 return execute 718 719 def _parse_datepart(self) -> exp.Extract: 720 this = self._parse_var(tokens=[TokenType.IDENTIFIER]) 721 expression = self._match(TokenType.COMMA) and self._parse_bitwise() 722 name = map_date_part(this, self.dialect) 723 724 return self.expression(exp.Extract, this=name, expression=expression) 725 726 def _parse_alter_table_set(self) -> exp.AlterSet: 727 return self._parse_wrapped(super()._parse_alter_table_set) 728 729 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 730 if self._match(TokenType.MERGE): 731 comments = self._prev_comments 732 merge = self._parse_merge() 733 merge.add_comments(comments, prepend=True) 734 return merge 735 736 return super()._parse_wrapped_select(table=table) 737 738 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 739 # We want to use _parse_types() if the first token after :: is a known type, 740 # otherwise we could parse something like x::varchar(max) into a function 741 if self._match_set(self.TYPE_TOKENS, advance=False): 742 return self._parse_types() 743 744 return self._parse_function() or self._parse_types() 745 746 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 747 if not self._match(TokenType.OPTION): 748 return None 749 750 def _parse_option() -> t.Optional[exp.Expression]: 751 option = self._parse_var_from_options(OPTIONS) 752 if not option: 753 return None 754 755 self._match(TokenType.EQ) 756 return self.expression( 757 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 758 ) 759 760 return self._parse_wrapped_csv(_parse_option) 761 762 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 763 this = self._parse_primary_or_var() 764 if self._match(TokenType.L_PAREN, advance=False): 765 expression = self._parse_wrapped(self._parse_string) 766 else: 767 expression = None 768 769 return exp.XMLKeyValueOption(this=this, expression=expression) 770 771 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 772 if not self._match_pair(TokenType.FOR, TokenType.XML): 773 return None 774 775 def _parse_for_xml() -> t.Optional[exp.Expression]: 776 return self.expression( 777 exp.QueryOption, 778 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 779 or self._parse_xml_key_value_option(), 780 ) 781 782 return self._parse_csv(_parse_for_xml) 783 784 def _parse_projections( 785 self, 786 ) -> t.Tuple[t.List[exp.Expression], t.Optional[t.List[exp.Expression]]]: 787 """ 788 T-SQL supports the syntax alias = expression in the SELECT's projection list, 789 so we transform all parsed Selects to convert their EQ projections into Aliases. 790 791 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 792 """ 793 projections, _ = super()._parse_projections() 794 return [ 795 ( 796 exp.alias_(projection.expression, projection.this.this, copy=False) 797 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 798 else projection 799 ) 800 for projection in projections 801 ], None 802 803 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 804 """Applies to SQL Server and Azure SQL Database 805 COMMIT [ { TRAN | TRANSACTION } 806 [ transaction_name | @tran_name_variable ] ] 807 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 808 809 ROLLBACK { TRAN | TRANSACTION } 810 [ transaction_name | @tran_name_variable 811 | savepoint_name | @savepoint_variable ] 812 """ 813 rollback = self._prev.token_type == TokenType.ROLLBACK 814 815 self._match_texts(("TRAN", "TRANSACTION")) 816 this = self._parse_id_var() 817 818 if rollback: 819 return self.expression(exp.Rollback, this=this) 820 821 durability = None 822 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 823 self._match_text_seq("DELAYED_DURABILITY") 824 self._match(TokenType.EQ) 825 826 if self._match_text_seq("OFF"): 827 durability = False 828 else: 829 self._match(TokenType.ON) 830 durability = True 831 832 self._match_r_paren() 833 834 return self.expression(exp.Commit, this=this, durability=durability) 835 836 def _parse_transaction(self) -> exp.Transaction | exp.Command: 837 """Applies to SQL Server and Azure SQL Database 838 BEGIN { TRAN | TRANSACTION } 839 [ { transaction_name | @tran_name_variable } 840 [ WITH MARK [ 'description' ] ] 841 ] 842 """ 843 if self._match_texts(("TRAN", "TRANSACTION")): 844 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 845 if self._match_text_seq("WITH", "MARK"): 846 transaction.set("mark", self._parse_string()) 847 848 return transaction 849 850 return self._parse_as_command(self._prev) 851 852 def _parse_returns(self) -> exp.ReturnsProperty: 853 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 854 returns = super()._parse_returns() 855 returns.set("table", table) 856 return returns 857 858 def _parse_convert( 859 self, strict: bool, safe: t.Optional[bool] = None 860 ) -> t.Optional[exp.Expression]: 861 this = self._parse_types() 862 self._match(TokenType.COMMA) 863 args = [this, *self._parse_csv(self._parse_assignment)] 864 convert = exp.Convert.from_arg_list(args) 865 convert.set("safe", safe) 866 return convert 867 868 def _parse_column_def( 869 self, this: t.Optional[exp.Expression], computed_column: bool = True 870 ) -> t.Optional[exp.Expression]: 871 this = super()._parse_column_def(this=this, computed_column=computed_column) 872 if not this: 873 return None 874 if self._match(TokenType.EQ): 875 this.set("default", self._parse_disjunction()) 876 if self._match_texts(self.COLUMN_DEFINITION_MODES): 877 this.set("output", self._prev.text) 878 return this 879 880 def _parse_user_defined_function( 881 self, kind: t.Optional[TokenType] = None 882 ) -> t.Optional[exp.Expression]: 883 this = super()._parse_user_defined_function(kind=kind) 884 885 if kind == TokenType.FUNCTION or isinstance(this, exp.UserDefinedFunction): 886 return this 887 888 if kind == TokenType.PROCEDURE and this: 889 expressions = this.expressions 890 if not ( 891 expressions or self._match_set((TokenType.ALIAS, TokenType.WITH), advance=False) 892 ): 893 expressions = self._parse_csv(self._parse_function_parameter) 894 895 return self.expression( 896 exp.StoredProcedure, 897 this=this if isinstance(this, exp.Table) else this.this, 898 expressions=expressions, 899 wrapped=this.args.get("wrapped"), 900 ) 901 902 return self.expression(exp.UserDefinedFunction, this=this) 903 904 def _parse_into(self) -> t.Optional[exp.Into]: 905 into = super()._parse_into() 906 907 table = isinstance(into, exp.Into) and into.find(exp.Table) 908 if isinstance(table, exp.Table): 909 table_identifier = table.this 910 if table_identifier.args.get("temporary"): 911 # Promote the temporary property from the Identifier to the Into expression 912 t.cast(exp.Into, into).set("temporary", True) 913 914 return into 915 916 def _parse_id_var( 917 self, 918 any_token: bool = True, 919 tokens: t.Optional[t.Collection[TokenType]] = None, 920 ) -> t.Optional[exp.Expression]: 921 is_temporary = self._match(TokenType.HASH) 922 is_global = is_temporary and self._match(TokenType.HASH) 923 924 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 925 if this: 926 if is_global: 927 this.set("global_", True) 928 elif is_temporary: 929 this.set("temporary", True) 930 931 return this 932 933 def _parse_create(self) -> exp.Create | exp.Command: 934 create = super()._parse_create() 935 936 if isinstance(create, exp.Create): 937 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 938 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 939 if not create.args.get("properties"): 940 create.set("properties", exp.Properties(expressions=[])) 941 942 create.args["properties"].append("expressions", exp.TemporaryProperty()) 943 944 return create 945 946 def _parse_if(self) -> exp.IfBlock: 947 this = self._parse_condition() 948 true = self._parse_block() 949 950 false = self._match(TokenType.ELSE) and self._parse_block() 951 952 return self.expression(exp.IfBlock, this=this, true=true, false=false) 953 954 def _parse_unique(self) -> exp.UniqueColumnConstraint: 955 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 956 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 957 else: 958 this = self._parse_schema(self._parse_id_var(any_token=False)) 959 960 return self.expression(exp.UniqueColumnConstraint, this=this) 961 962 def _parse_update(self) -> exp.Update: 963 expression = super()._parse_update() 964 expression.set("options", self._parse_options()) 965 return expression 966 967 def _parse_partition(self) -> t.Optional[exp.Partition]: 968 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 969 return None 970 971 def parse_range(): 972 low = self._parse_bitwise() 973 high = self._parse_bitwise() if self._match_text_seq("TO") else None 974 975 return ( 976 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 977 ) 978 979 partition = self.expression( 980 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 981 ) 982 983 self._match_r_paren() 984 985 return partition 986 987 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 988 expression = super()._parse_alter_table_alter() 989 990 if expression is not None: 991 collation = expression.args.get("collate") 992 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 993 identifier = collation.this 994 collation.set("this", exp.Var(this=identifier.name)) 995 996 return expression 997 998 def _parse_primary_key_part(self) -> t.Optional[exp.Expression]: 999 return self._parse_ordered() 1000 1001 class Generator(generator.Generator): 1002 LIMIT_IS_TOP = True 1003 QUERY_HINTS = False 1004 RETURNING_END = False 1005 NVL2_SUPPORTED = False 1006 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 1007 LIMIT_FETCH = "FETCH" 1008 COMPUTED_COLUMN_WITH_TYPE = False 1009 CTE_RECURSIVE_KEYWORD_REQUIRED = False 1010 ENSURE_BOOLS = True 1011 NULL_ORDERING_SUPPORTED = None 1012 SUPPORTS_SINGLE_ARG_CONCAT = False 1013 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 1014 SUPPORTS_SELECT_INTO = True 1015 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 1016 SUPPORTS_TO_NUMBER = False 1017 SET_OP_MODIFIERS = False 1018 COPY_PARAMS_EQ_REQUIRED = True 1019 PARSE_JSON_NAME = None 1020 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 1021 ALTER_SET_WRAPPED = True 1022 ALTER_SET_TYPE = "" 1023 1024 EXPRESSIONS_WITHOUT_NESTED_CTES = { 1025 exp.Create, 1026 exp.Delete, 1027 exp.Insert, 1028 exp.Intersect, 1029 exp.Except, 1030 exp.Merge, 1031 exp.Select, 1032 exp.Subquery, 1033 exp.Union, 1034 exp.Update, 1035 } 1036 1037 SUPPORTED_JSON_PATH_PARTS = { 1038 exp.JSONPathKey, 1039 exp.JSONPathRoot, 1040 exp.JSONPathSubscript, 1041 } 1042 1043 TYPE_MAPPING = { 1044 **generator.Generator.TYPE_MAPPING, 1045 exp.DataType.Type.BOOLEAN: "BIT", 1046 exp.DataType.Type.DATETIME2: "DATETIME2", 1047 exp.DataType.Type.DECIMAL: "NUMERIC", 1048 exp.DataType.Type.DOUBLE: "FLOAT", 1049 exp.DataType.Type.INT: "INTEGER", 1050 exp.DataType.Type.ROWVERSION: "ROWVERSION", 1051 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 1052 exp.DataType.Type.TIMESTAMP: "DATETIME2", 1053 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 1054 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 1055 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 1056 exp.DataType.Type.UTINYINT: "TINYINT", 1057 exp.DataType.Type.VARIANT: "SQL_VARIANT", 1058 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 1059 } 1060 1061 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 1062 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 1063 1064 TRANSFORMS = { 1065 **generator.Generator.TRANSFORMS, 1066 exp.AnyValue: any_value_to_max_sql, 1067 exp.Atan2: rename_func("ATN2"), 1068 exp.ArrayToString: rename_func("STRING_AGG"), 1069 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 1070 exp.Ceil: rename_func("CEILING"), 1071 exp.Chr: rename_func("CHAR"), 1072 exp.DateAdd: date_delta_sql("DATEADD"), 1073 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 1074 exp.CurrentDate: rename_func("GETDATE"), 1075 exp.CurrentTimestamp: rename_func("GETDATE"), 1076 exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"), 1077 exp.DateStrToDate: datestrtodate_sql, 1078 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 1079 exp.GroupConcat: _string_agg_sql, 1080 exp.If: rename_func("IIF"), 1081 exp.JSONExtract: _json_extract_sql, 1082 exp.JSONExtractScalar: _json_extract_sql, 1083 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1084 exp.Ln: rename_func("LOG"), 1085 exp.Max: max_or_greatest, 1086 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 1087 exp.Min: min_or_least, 1088 exp.NumberToStr: _format_sql, 1089 exp.Repeat: rename_func("REPLICATE"), 1090 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 1091 exp.Select: transforms.preprocess( 1092 [ 1093 transforms.eliminate_distinct_on, 1094 transforms.eliminate_semi_and_anti_joins, 1095 transforms.eliminate_qualify, 1096 transforms.unnest_generate_date_array_using_recursive_cte, 1097 ] 1098 ), 1099 exp.Stddev: rename_func("STDEV"), 1100 exp.StrPosition: lambda self, e: strposition_sql( 1101 self, e, func_name="CHARINDEX", supports_position=True 1102 ), 1103 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 1104 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 1105 exp.SHA1Digest: lambda self, e: self.func( 1106 "HASHBYTES", exp.Literal.string("SHA1"), e.this 1107 ), 1108 exp.SHA2: lambda self, e: self.func( 1109 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 1110 ), 1111 exp.TemporaryProperty: lambda self, e: "", 1112 exp.TimeStrToTime: _timestrtotime_sql, 1113 exp.TimeToStr: _format_sql, 1114 exp.Trim: trim_sql, 1115 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 1116 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 1117 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 1118 exp.Trunc: lambda self, e: self.func( 1119 "ROUND", 1120 e.this, 1121 e.args.get("decimals") or exp.Literal.number(0), 1122 exp.Literal.number(1), 1123 ), 1124 exp.Uuid: lambda *_: "NEWID()", 1125 exp.DateFromParts: rename_func("DATEFROMPARTS"), 1126 } 1127 1128 TRANSFORMS.pop(exp.ReturnsProperty) 1129 1130 PROPERTIES_LOCATION = { 1131 **generator.Generator.PROPERTIES_LOCATION, 1132 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 1133 } 1134 1135 def scope_resolution(self, rhs: str, scope_name: str) -> str: 1136 return f"{scope_name}::{rhs}" 1137 1138 def select_sql(self, expression: exp.Select) -> str: 1139 limit = expression.args.get("limit") 1140 offset = expression.args.get("offset") 1141 1142 if isinstance(limit, exp.Fetch) and not offset: 1143 # Dialects like Oracle can FETCH directly from a row set but 1144 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1145 offset = exp.Offset(expression=exp.Literal.number(0)) 1146 expression.set("offset", offset) 1147 1148 if offset: 1149 if not expression.args.get("order"): 1150 # ORDER BY is required in order to use OFFSET in a query, so we use 1151 # a noop order by, since we don't really care about the order. 1152 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1153 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1154 1155 if isinstance(limit, exp.Limit): 1156 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1157 # we replace here because otherwise TOP would be generated in select_sql 1158 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1159 1160 return super().select_sql(expression) 1161 1162 def convert_sql(self, expression: exp.Convert) -> str: 1163 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1164 return self.func( 1165 name, expression.this, expression.expression, expression.args.get("style") 1166 ) 1167 1168 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1169 option = self.sql(expression, "this") 1170 value = self.sql(expression, "expression") 1171 if value: 1172 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1173 return f"{option} {optional_equal_sign}{value}" 1174 return option 1175 1176 def lateral_op(self, expression: exp.Lateral) -> str: 1177 cross_apply = expression.args.get("cross_apply") 1178 if cross_apply is True: 1179 return "CROSS APPLY" 1180 if cross_apply is False: 1181 return "OUTER APPLY" 1182 1183 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1184 self.unsupported("LATERAL clause is not supported.") 1185 return "LATERAL" 1186 1187 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1188 this = expression.this 1189 split_count = len(this.name.split(".")) 1190 delimiter = expression.args.get("delimiter") 1191 part_index = expression.args.get("part_index") 1192 1193 if ( 1194 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1195 or (delimiter and delimiter.name != ".") 1196 or not part_index 1197 or split_count > 4 1198 ): 1199 self.unsupported( 1200 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1201 ) 1202 return "" 1203 1204 return self.func( 1205 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1206 ) 1207 1208 def extract_sql(self, expression: exp.Extract) -> str: 1209 part = expression.this 1210 name = DATE_PART_UNMAPPING.get(part.name.upper()) or part 1211 1212 return self.func("DATEPART", name, expression.expression) 1213 1214 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1215 nano = expression.args.get("nano") 1216 if nano is not None: 1217 nano.pop() 1218 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1219 1220 if expression.args.get("fractions") is None: 1221 expression.set("fractions", exp.Literal.number(0)) 1222 if expression.args.get("precision") is None: 1223 expression.set("precision", exp.Literal.number(0)) 1224 1225 return rename_func("TIMEFROMPARTS")(self, expression) 1226 1227 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1228 zone = expression.args.get("zone") 1229 if zone is not None: 1230 zone.pop() 1231 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1232 1233 nano = expression.args.get("nano") 1234 if nano is not None: 1235 nano.pop() 1236 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1237 1238 if expression.args.get("milli") is None: 1239 expression.set("milli", exp.Literal.number(0)) 1240 1241 return rename_func("DATETIMEFROMPARTS")(self, expression) 1242 1243 def setitem_sql(self, expression: exp.SetItem) -> str: 1244 this = expression.this 1245 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1246 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1247 return f"{self.sql(this.left)} {self.sql(this.right)}" 1248 1249 return super().setitem_sql(expression) 1250 1251 def boolean_sql(self, expression: exp.Boolean) -> str: 1252 if type(expression.parent) in BIT_TYPES or isinstance( 1253 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1254 ): 1255 return "1" if expression.this else "0" 1256 1257 return "(1 = 1)" if expression.this else "(1 = 0)" 1258 1259 def is_sql(self, expression: exp.Is) -> str: 1260 if isinstance(expression.expression, exp.Boolean): 1261 return self.binary(expression, "=") 1262 return self.binary(expression, "IS") 1263 1264 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1265 sql = self.sql(expression, "this") 1266 properties = expression.args.get("properties") 1267 1268 if sql[:1] != "#" and any( 1269 isinstance(prop, exp.TemporaryProperty) 1270 for prop in (properties.expressions if properties else []) 1271 ): 1272 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1273 1274 return sql 1275 1276 def create_sql(self, expression: exp.Create) -> str: 1277 kind = expression.kind 1278 exists = expression.args.get("exists") 1279 expression.set("exists", None) 1280 1281 like_property = expression.find(exp.LikeProperty) 1282 if like_property: 1283 ctas_expression = like_property.this 1284 else: 1285 ctas_expression = expression.expression 1286 1287 if kind == "VIEW": 1288 expression.this.set("catalog", None) 1289 with_ = expression.args.get("with_") 1290 if ctas_expression and with_: 1291 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1292 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1293 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1294 ctas_expression.set("with_", with_.pop()) 1295 1296 table = expression.find(exp.Table) 1297 1298 # Convert CTAS statement to SELECT .. INTO .. 1299 if kind == "TABLE" and ctas_expression: 1300 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1301 ctas_expression = ctas_expression.subquery() 1302 1303 properties = expression.args.get("properties") or exp.Properties() 1304 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1305 1306 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1307 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1308 1309 if like_property: 1310 select_into.limit(0, copy=False) 1311 1312 sql = self.sql(select_into) 1313 else: 1314 sql = super().create_sql(expression) 1315 1316 if exists: 1317 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1318 sql_with_ctes = self.prepend_ctes(expression, sql) 1319 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1320 if kind == "SCHEMA": 1321 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1322 elif kind == "TABLE": 1323 assert table 1324 where = exp.and_( 1325 exp.column("TABLE_NAME").eq(table.name), 1326 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1327 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1328 ) 1329 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1330 elif kind == "INDEX": 1331 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1332 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1333 elif expression.args.get("replace"): 1334 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1335 1336 return self.prepend_ctes(expression, sql) 1337 1338 @generator.unsupported_args("unlogged", "expressions") 1339 def into_sql(self, expression: exp.Into) -> str: 1340 if expression.args.get("temporary"): 1341 # If the Into expression has a temporary property, push this down to the Identifier 1342 table = expression.find(exp.Table) 1343 if table and isinstance(table.this, exp.Identifier): 1344 table.this.set("temporary", True) 1345 1346 return f"{self.seg('INTO')} {self.sql(expression, 'this')}" 1347 1348 def count_sql(self, expression: exp.Count) -> str: 1349 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1350 return rename_func(func_name)(self, expression) 1351 1352 def datediff_sql(self, expression: exp.DateDiff) -> str: 1353 func_name = "DATEDIFF_BIG" if expression.args.get("big_int") else "DATEDIFF" 1354 return date_delta_sql(func_name)(self, expression) 1355 1356 def offset_sql(self, expression: exp.Offset) -> str: 1357 return f"{super().offset_sql(expression)} ROWS" 1358 1359 def version_sql(self, expression: exp.Version) -> str: 1360 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1361 this = f"FOR {name}" 1362 expr = expression.expression 1363 kind = expression.text("kind") 1364 if kind in ("FROM", "BETWEEN"): 1365 args = expr.expressions 1366 sep = "TO" if kind == "FROM" else "AND" 1367 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1368 else: 1369 expr_sql = self.sql(expr) 1370 1371 expr_sql = f" {expr_sql}" if expr_sql else "" 1372 return f"{this} {kind}{expr_sql}" 1373 1374 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1375 table = expression.args.get("table") 1376 table = f"{table} " if table else "" 1377 return f"RETURNS {table}{self.sql(expression, 'this')}" 1378 1379 def returning_sql(self, expression: exp.Returning) -> str: 1380 into = self.sql(expression, "into") 1381 into = self.seg(f"INTO {into}") if into else "" 1382 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1383 1384 def transaction_sql(self, expression: exp.Transaction) -> str: 1385 this = self.sql(expression, "this") 1386 this = f" {this}" if this else "" 1387 mark = self.sql(expression, "mark") 1388 mark = f" WITH MARK {mark}" if mark else "" 1389 return f"BEGIN TRANSACTION{this}{mark}" 1390 1391 def commit_sql(self, expression: exp.Commit) -> str: 1392 this = self.sql(expression, "this") 1393 this = f" {this}" if this else "" 1394 durability = expression.args.get("durability") 1395 durability = ( 1396 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1397 if durability is not None 1398 else "" 1399 ) 1400 return f"COMMIT TRANSACTION{this}{durability}" 1401 1402 def rollback_sql(self, expression: exp.Rollback) -> str: 1403 this = self.sql(expression, "this") 1404 this = f" {this}" if this else "" 1405 return f"ROLLBACK TRANSACTION{this}" 1406 1407 def identifier_sql(self, expression: exp.Identifier) -> str: 1408 identifier = super().identifier_sql(expression) 1409 1410 if expression.args.get("global_"): 1411 identifier = f"##{identifier}" 1412 elif expression.args.get("temporary"): 1413 identifier = f"#{identifier}" 1414 1415 return identifier 1416 1417 def constraint_sql(self, expression: exp.Constraint) -> str: 1418 this = self.sql(expression, "this") 1419 expressions = self.expressions(expression, flat=True, sep=" ") 1420 return f"CONSTRAINT {this} {expressions}" 1421 1422 def length_sql(self, expression: exp.Length) -> str: 1423 return self._uncast_text(expression, "LEN") 1424 1425 def right_sql(self, expression: exp.Right) -> str: 1426 return self._uncast_text(expression, "RIGHT") 1427 1428 def left_sql(self, expression: exp.Left) -> str: 1429 return self._uncast_text(expression, "LEFT") 1430 1431 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1432 this = expression.this 1433 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1434 this_sql = self.sql(this, "this") 1435 else: 1436 this_sql = self.sql(this) 1437 expression_sql = self.sql(expression, "expression") 1438 return self.func(name, this_sql, expression_sql if expression_sql else None) 1439 1440 def partition_sql(self, expression: exp.Partition) -> str: 1441 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1442 1443 def alter_sql(self, expression: exp.Alter) -> str: 1444 action = seq_get(expression.args.get("actions") or [], 0) 1445 if isinstance(action, exp.AlterRename): 1446 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1447 return super().alter_sql(expression) 1448 1449 def drop_sql(self, expression: exp.Drop) -> str: 1450 if expression.args["kind"] == "VIEW": 1451 expression.this.set("catalog", None) 1452 return super().drop_sql(expression) 1453 1454 def options_modifier(self, expression: exp.Expression) -> str: 1455 options = self.expressions(expression, key="options") 1456 return f" OPTION{self.wrap(options)}" if options else "" 1457 1458 def dpipe_sql(self, expression: exp.DPipe) -> str: 1459 return self.sql( 1460 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1461 ) 1462 1463 def isascii_sql(self, expression: exp.IsAscii) -> str: 1464 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1465 1466 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1467 this = super().columndef_sql(expression, sep) 1468 default = self.sql(expression, "default") 1469 default = f" = {default}" if default else "" 1470 output = self.sql(expression, "output") 1471 output = f" {output}" if output else "" 1472 return f"{this}{default}{output}" 1473 1474 def coalesce_sql(self, expression: exp.Coalesce) -> str: 1475 func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE" 1476 return rename_func(func_name)(self, expression) 1477 1478 def storedprocedure_sql(self, expression: exp.StoredProcedure) -> str: 1479 this = self.sql(expression, "this") 1480 expressions = self.expressions(expression) 1481 expressions = ( 1482 self.wrap(expressions) if expression.args.get("wrapped") else f" {expressions}" 1483 ) 1484 return f"{this}{expressions}" if expressions.strip() != "" else this 1485 1486 def ifblock_sql(self, expression: exp.IfBlock) -> str: 1487 this = self.sql(expression, "this") 1488 true = self.sql(expression, "true") 1489 true = f" {true}" if true else " " 1490 false = self.sql(expression, "false") 1491 false = f"; ELSE BEGIN {false}" if false else "" 1492 return f"IF {this} BEGIN{true}{false}" 1493 1494 def whileblock_sql(self, expression: exp.WhileBlock) -> str: 1495 this = self.sql(expression, "this") 1496 body = self.sql(expression, "body") 1497 body = f" {body}" if body else " " 1498 return f"WHILE {this} BEGIN{body}" 1499 1500 def execute_sql(self, expression: exp.Execute) -> str: 1501 this = self.sql(expression, "this") 1502 expressions = self.expressions(expression) 1503 expressions = f" {expressions}" if expressions else "" 1504 return f"EXECUTE {this}{expressions}" 1505 1506 def executesql_sql(self, expression: exp.ExecuteSql) -> str: 1507 return self.execute_sql(expression)
276def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression: 277 """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.""" 278 alias = expression.args.get("alias") 279 280 if ( 281 isinstance(expression, (exp.CTE, exp.Subquery)) 282 and isinstance(alias, exp.TableAlias) 283 and not alias.columns 284 ): 285 from sqlglot.optimizer.qualify_columns import qualify_outputs 286 287 # We keep track of the unaliased column projection indexes instead of the expressions 288 # themselves, because the latter are going to be replaced by new nodes when the aliases 289 # are added and hence we won't be able to reach these newly added Alias parents 290 query = expression.this 291 unaliased_column_indexes = ( 292 i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias 293 ) 294 295 qualify_outputs(query) 296 297 # Preserve the quoting information of columns for newly added Alias nodes 298 query_selects = query.selects 299 for select_index in unaliased_column_indexes: 300 alias = query_selects[select_index] 301 column = alias.this 302 if isinstance(column.this, exp.Identifier): 303 alias.args["alias"].set("quoted", column.this.quoted) 304 305 return expression
Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
408class TSQL(Dialect): 409 SUPPORTS_SEMI_ANTI_JOIN = False 410 LOG_BASE_FIRST = False 411 TYPED_DIVISION = True 412 CONCAT_COALESCE = True 413 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 414 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 415 416 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 417 418 EXPRESSION_METADATA = EXPRESSION_METADATA.copy() 419 420 DATE_PART_MAPPING = { 421 **Dialect.DATE_PART_MAPPING, 422 "QQ": "QUARTER", 423 "M": "MONTH", 424 "Y": "DAYOFYEAR", 425 "WW": "WEEK", 426 "N": "MINUTE", 427 "SS": "SECOND", 428 "MCS": "MICROSECOND", 429 "TZOFFSET": "TIMEZONE_MINUTE", 430 "TZ": "TIMEZONE_MINUTE", 431 "ISO_WEEK": "WEEKISO", 432 "ISOWK": "WEEKISO", 433 "ISOWW": "WEEKISO", 434 } 435 436 TIME_MAPPING = { 437 "year": "%Y", 438 "dayofyear": "%j", 439 "day": "%d", 440 "dy": "%d", 441 "y": "%Y", 442 "week": "%W", 443 "ww": "%W", 444 "wk": "%W", 445 "isowk": "%V", 446 "isoww": "%V", 447 "iso_week": "%V", 448 "hour": "%h", 449 "hh": "%I", 450 "minute": "%M", 451 "mi": "%M", 452 "n": "%M", 453 "second": "%S", 454 "ss": "%S", 455 "s": "%-S", 456 "millisecond": "%f", 457 "ms": "%f", 458 "weekday": "%w", 459 "dw": "%w", 460 "month": "%m", 461 "mm": "%M", 462 "m": "%-M", 463 "Y": "%Y", 464 "YYYY": "%Y", 465 "YY": "%y", 466 "MMMM": "%B", 467 "MMM": "%b", 468 "MM": "%m", 469 "M": "%-m", 470 "dddd": "%A", 471 "dd": "%d", 472 "d": "%-d", 473 "HH": "%H", 474 "H": "%-H", 475 "h": "%-I", 476 "ffffff": "%f", 477 "yyyy": "%Y", 478 "yy": "%y", 479 } 480 481 CONVERT_FORMAT_MAPPING = { 482 "0": "%b %d %Y %-I:%M%p", 483 "1": "%m/%d/%y", 484 "2": "%y.%m.%d", 485 "3": "%d/%m/%y", 486 "4": "%d.%m.%y", 487 "5": "%d-%m-%y", 488 "6": "%d %b %y", 489 "7": "%b %d, %y", 490 "8": "%H:%M:%S", 491 "9": "%b %d %Y %-I:%M:%S:%f%p", 492 "10": "mm-dd-yy", 493 "11": "yy/mm/dd", 494 "12": "yymmdd", 495 "13": "%d %b %Y %H:%M:ss:%f", 496 "14": "%H:%M:%S:%f", 497 "20": "%Y-%m-%d %H:%M:%S", 498 "21": "%Y-%m-%d %H:%M:%S.%f", 499 "22": "%m/%d/%y %-I:%M:%S %p", 500 "23": "%Y-%m-%d", 501 "24": "%H:%M:%S", 502 "25": "%Y-%m-%d %H:%M:%S.%f", 503 "100": "%b %d %Y %-I:%M%p", 504 "101": "%m/%d/%Y", 505 "102": "%Y.%m.%d", 506 "103": "%d/%m/%Y", 507 "104": "%d.%m.%Y", 508 "105": "%d-%m-%Y", 509 "106": "%d %b %Y", 510 "107": "%b %d, %Y", 511 "108": "%H:%M:%S", 512 "109": "%b %d %Y %-I:%M:%S:%f%p", 513 "110": "%m-%d-%Y", 514 "111": "%Y/%m/%d", 515 "112": "%Y%m%d", 516 "113": "%d %b %Y %H:%M:%S:%f", 517 "114": "%H:%M:%S:%f", 518 "120": "%Y-%m-%d %H:%M:%S", 519 "121": "%Y-%m-%d %H:%M:%S.%f", 520 "126": "%Y-%m-%dT%H:%M:%S.%f", 521 } 522 523 FORMAT_TIME_MAPPING = { 524 "y": "%B %Y", 525 "d": "%m/%d/%Y", 526 "H": "%-H", 527 "h": "%-I", 528 "s": "%Y-%m-%d %H:%M:%S", 529 "D": "%A,%B,%Y", 530 "f": "%A,%B,%Y %-I:%M %p", 531 "F": "%A,%B,%Y %-I:%M:%S %p", 532 "g": "%m/%d/%Y %-I:%M %p", 533 "G": "%m/%d/%Y %-I:%M:%S %p", 534 "M": "%B %-d", 535 "m": "%B %-d", 536 "O": "%Y-%m-%dT%H:%M:%S", 537 "u": "%Y-%M-%D %H:%M:%S%z", 538 "U": "%A, %B %D, %Y %H:%M:%S%z", 539 "T": "%-I:%M:%S %p", 540 "t": "%-I:%M", 541 "Y": "%a %Y", 542 } 543 544 class Tokenizer(tokens.Tokenizer): 545 IDENTIFIERS = [("[", "]"), '"'] 546 QUOTES = ["'", '"'] 547 HEX_STRINGS = [("0x", ""), ("0X", "")] 548 VAR_SINGLE_TOKENS = {"@", "$", "#"} 549 550 KEYWORDS = { 551 **tokens.Tokenizer.KEYWORDS, 552 "CLUSTERED INDEX": TokenType.INDEX, 553 "DATETIME2": TokenType.DATETIME2, 554 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 555 "DECLARE": TokenType.DECLARE, 556 "EXEC": TokenType.EXECUTE, 557 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 558 "GO": TokenType.COMMAND, 559 "IMAGE": TokenType.IMAGE, 560 "MONEY": TokenType.MONEY, 561 "NONCLUSTERED INDEX": TokenType.INDEX, 562 "NTEXT": TokenType.TEXT, 563 "OPTION": TokenType.OPTION, 564 "OUTPUT": TokenType.RETURNING, 565 "PRINT": TokenType.COMMAND, 566 "PROC": TokenType.PROCEDURE, 567 "REAL": TokenType.FLOAT, 568 "ROWVERSION": TokenType.ROWVERSION, 569 "SMALLDATETIME": TokenType.SMALLDATETIME, 570 "SMALLMONEY": TokenType.SMALLMONEY, 571 "SQL_VARIANT": TokenType.VARIANT, 572 "SYSTEM_USER": TokenType.CURRENT_USER, 573 "TOP": TokenType.TOP, 574 "TIMESTAMP": TokenType.ROWVERSION, 575 "TINYINT": TokenType.UTINYINT, 576 "UNIQUEIDENTIFIER": TokenType.UUID, 577 "UPDATE STATISTICS": TokenType.COMMAND, 578 "XML": TokenType.XML, 579 } 580 KEYWORDS.pop("/*+") 581 582 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} - {TokenType.EXECUTE} 583 584 class Parser(parser.Parser): 585 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 586 LOG_DEFAULTS_TO_LN = True 587 STRING_ALIASES = True 588 NO_PAREN_IF_COMMANDS = False 589 590 QUERY_MODIFIER_PARSERS = { 591 **parser.Parser.QUERY_MODIFIER_PARSERS, 592 TokenType.OPTION: lambda self: ("options", self._parse_options()), 593 TokenType.FOR: lambda self: ("for_", self._parse_for()), 594 } 595 596 # T-SQL does not allow BEGIN to be used as an identifier 597 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 598 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 599 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 600 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 601 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 602 603 FUNCTIONS = { 604 **parser.Parser.FUNCTIONS, 605 "ATN2": exp.Atan2.from_arg_list, 606 "CHARINDEX": lambda args: exp.StrPosition( 607 this=seq_get(args, 1), 608 substr=seq_get(args, 0), 609 position=seq_get(args, 2), 610 ), 611 "COUNT": lambda args: exp.Count( 612 this=seq_get(args, 0), expressions=args[1:], big_int=False 613 ), 614 "COUNT_BIG": lambda args: exp.Count( 615 this=seq_get(args, 0), expressions=args[1:], big_int=True 616 ), 617 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 618 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 619 "DATEDIFF_BIG": _build_date_delta( 620 exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL, big_int=True 621 ), 622 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 623 "DATETIMEFROMPARTS": _build_datetimefromparts, 624 "EOMONTH": _build_eomonth, 625 "FORMAT": _build_format, 626 "GETDATE": exp.CurrentTimestamp.from_arg_list, 627 "HASHBYTES": _build_hashbytes, 628 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 629 "JSON_QUERY": _build_json_query, 630 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 631 "LEN": _build_with_arg_as_text(exp.Length), 632 "LEFT": _build_with_arg_as_text(exp.Left), 633 "NEWID": exp.Uuid.from_arg_list, 634 "RIGHT": _build_with_arg_as_text(exp.Right), 635 "PARSENAME": _build_parsename, 636 "REPLICATE": exp.Repeat.from_arg_list, 637 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 638 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 639 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 640 "SUSER_NAME": exp.CurrentUser.from_arg_list, 641 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 642 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 643 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 644 "TIMEFROMPARTS": _build_timefromparts, 645 "DATETRUNC": _build_datetrunc, 646 } 647 648 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 649 650 PROCEDURE_OPTIONS = dict.fromkeys( 651 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 652 ) 653 654 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 655 656 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 657 TokenType.TABLE, 658 *parser.Parser.TYPE_TOKENS, 659 } 660 661 STATEMENT_PARSERS = { 662 **parser.Parser.STATEMENT_PARSERS, 663 TokenType.DECLARE: lambda self: self._parse_declare(), 664 TokenType.EXECUTE: lambda self: self._parse_execute(), 665 } 666 667 RANGE_PARSERS = { 668 **parser.Parser.RANGE_PARSERS, 669 TokenType.DCOLON: lambda self, this: self.expression( 670 exp.ScopeResolution, 671 this=this, 672 expression=self._parse_function() or self._parse_var(any_token=True), 673 ), 674 } 675 676 NO_PAREN_FUNCTION_PARSERS = { 677 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 678 "NEXT": lambda self: self._parse_next_value_for(), 679 } 680 681 FUNCTION_PARSERS: t.Dict[str, t.Callable] = { 682 **parser.Parser.FUNCTION_PARSERS, 683 "JSON_ARRAYAGG": lambda self: self.expression( 684 exp.JSONArrayAgg, 685 this=self._parse_bitwise(), 686 order=self._parse_order(), 687 null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"), 688 ), 689 "DATEPART": lambda self: self._parse_datepart(), 690 } 691 692 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 693 COLUMN_OPERATORS = { 694 **parser.Parser.COLUMN_OPERATORS, 695 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 696 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 697 else self.expression(exp.ScopeResolution, this=this, expression=to), 698 } 699 700 SET_OP_MODIFIERS = {"offset"} 701 702 ODBC_DATETIME_LITERALS = { 703 "d": exp.Date, 704 "t": exp.Time, 705 "ts": exp.Timestamp, 706 } 707 708 def _parse_execute(self) -> exp.Execute: 709 execute = self.expression( 710 exp.Execute, 711 this=self._parse_table(schema=True), 712 expressions=self._parse_csv(self._parse_expression), 713 ) 714 715 if execute.name.lower() == "sp_executesql": 716 execute = self.expression(exp.ExecuteSql, **execute.args) 717 718 return execute 719 720 def _parse_datepart(self) -> exp.Extract: 721 this = self._parse_var(tokens=[TokenType.IDENTIFIER]) 722 expression = self._match(TokenType.COMMA) and self._parse_bitwise() 723 name = map_date_part(this, self.dialect) 724 725 return self.expression(exp.Extract, this=name, expression=expression) 726 727 def _parse_alter_table_set(self) -> exp.AlterSet: 728 return self._parse_wrapped(super()._parse_alter_table_set) 729 730 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 731 if self._match(TokenType.MERGE): 732 comments = self._prev_comments 733 merge = self._parse_merge() 734 merge.add_comments(comments, prepend=True) 735 return merge 736 737 return super()._parse_wrapped_select(table=table) 738 739 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 740 # We want to use _parse_types() if the first token after :: is a known type, 741 # otherwise we could parse something like x::varchar(max) into a function 742 if self._match_set(self.TYPE_TOKENS, advance=False): 743 return self._parse_types() 744 745 return self._parse_function() or self._parse_types() 746 747 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 748 if not self._match(TokenType.OPTION): 749 return None 750 751 def _parse_option() -> t.Optional[exp.Expression]: 752 option = self._parse_var_from_options(OPTIONS) 753 if not option: 754 return None 755 756 self._match(TokenType.EQ) 757 return self.expression( 758 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 759 ) 760 761 return self._parse_wrapped_csv(_parse_option) 762 763 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 764 this = self._parse_primary_or_var() 765 if self._match(TokenType.L_PAREN, advance=False): 766 expression = self._parse_wrapped(self._parse_string) 767 else: 768 expression = None 769 770 return exp.XMLKeyValueOption(this=this, expression=expression) 771 772 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 773 if not self._match_pair(TokenType.FOR, TokenType.XML): 774 return None 775 776 def _parse_for_xml() -> t.Optional[exp.Expression]: 777 return self.expression( 778 exp.QueryOption, 779 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 780 or self._parse_xml_key_value_option(), 781 ) 782 783 return self._parse_csv(_parse_for_xml) 784 785 def _parse_projections( 786 self, 787 ) -> t.Tuple[t.List[exp.Expression], t.Optional[t.List[exp.Expression]]]: 788 """ 789 T-SQL supports the syntax alias = expression in the SELECT's projection list, 790 so we transform all parsed Selects to convert their EQ projections into Aliases. 791 792 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 793 """ 794 projections, _ = super()._parse_projections() 795 return [ 796 ( 797 exp.alias_(projection.expression, projection.this.this, copy=False) 798 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 799 else projection 800 ) 801 for projection in projections 802 ], None 803 804 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 805 """Applies to SQL Server and Azure SQL Database 806 COMMIT [ { TRAN | TRANSACTION } 807 [ transaction_name | @tran_name_variable ] ] 808 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 809 810 ROLLBACK { TRAN | TRANSACTION } 811 [ transaction_name | @tran_name_variable 812 | savepoint_name | @savepoint_variable ] 813 """ 814 rollback = self._prev.token_type == TokenType.ROLLBACK 815 816 self._match_texts(("TRAN", "TRANSACTION")) 817 this = self._parse_id_var() 818 819 if rollback: 820 return self.expression(exp.Rollback, this=this) 821 822 durability = None 823 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 824 self._match_text_seq("DELAYED_DURABILITY") 825 self._match(TokenType.EQ) 826 827 if self._match_text_seq("OFF"): 828 durability = False 829 else: 830 self._match(TokenType.ON) 831 durability = True 832 833 self._match_r_paren() 834 835 return self.expression(exp.Commit, this=this, durability=durability) 836 837 def _parse_transaction(self) -> exp.Transaction | exp.Command: 838 """Applies to SQL Server and Azure SQL Database 839 BEGIN { TRAN | TRANSACTION } 840 [ { transaction_name | @tran_name_variable } 841 [ WITH MARK [ 'description' ] ] 842 ] 843 """ 844 if self._match_texts(("TRAN", "TRANSACTION")): 845 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 846 if self._match_text_seq("WITH", "MARK"): 847 transaction.set("mark", self._parse_string()) 848 849 return transaction 850 851 return self._parse_as_command(self._prev) 852 853 def _parse_returns(self) -> exp.ReturnsProperty: 854 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 855 returns = super()._parse_returns() 856 returns.set("table", table) 857 return returns 858 859 def _parse_convert( 860 self, strict: bool, safe: t.Optional[bool] = None 861 ) -> t.Optional[exp.Expression]: 862 this = self._parse_types() 863 self._match(TokenType.COMMA) 864 args = [this, *self._parse_csv(self._parse_assignment)] 865 convert = exp.Convert.from_arg_list(args) 866 convert.set("safe", safe) 867 return convert 868 869 def _parse_column_def( 870 self, this: t.Optional[exp.Expression], computed_column: bool = True 871 ) -> t.Optional[exp.Expression]: 872 this = super()._parse_column_def(this=this, computed_column=computed_column) 873 if not this: 874 return None 875 if self._match(TokenType.EQ): 876 this.set("default", self._parse_disjunction()) 877 if self._match_texts(self.COLUMN_DEFINITION_MODES): 878 this.set("output", self._prev.text) 879 return this 880 881 def _parse_user_defined_function( 882 self, kind: t.Optional[TokenType] = None 883 ) -> t.Optional[exp.Expression]: 884 this = super()._parse_user_defined_function(kind=kind) 885 886 if kind == TokenType.FUNCTION or isinstance(this, exp.UserDefinedFunction): 887 return this 888 889 if kind == TokenType.PROCEDURE and this: 890 expressions = this.expressions 891 if not ( 892 expressions or self._match_set((TokenType.ALIAS, TokenType.WITH), advance=False) 893 ): 894 expressions = self._parse_csv(self._parse_function_parameter) 895 896 return self.expression( 897 exp.StoredProcedure, 898 this=this if isinstance(this, exp.Table) else this.this, 899 expressions=expressions, 900 wrapped=this.args.get("wrapped"), 901 ) 902 903 return self.expression(exp.UserDefinedFunction, this=this) 904 905 def _parse_into(self) -> t.Optional[exp.Into]: 906 into = super()._parse_into() 907 908 table = isinstance(into, exp.Into) and into.find(exp.Table) 909 if isinstance(table, exp.Table): 910 table_identifier = table.this 911 if table_identifier.args.get("temporary"): 912 # Promote the temporary property from the Identifier to the Into expression 913 t.cast(exp.Into, into).set("temporary", True) 914 915 return into 916 917 def _parse_id_var( 918 self, 919 any_token: bool = True, 920 tokens: t.Optional[t.Collection[TokenType]] = None, 921 ) -> t.Optional[exp.Expression]: 922 is_temporary = self._match(TokenType.HASH) 923 is_global = is_temporary and self._match(TokenType.HASH) 924 925 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 926 if this: 927 if is_global: 928 this.set("global_", True) 929 elif is_temporary: 930 this.set("temporary", True) 931 932 return this 933 934 def _parse_create(self) -> exp.Create | exp.Command: 935 create = super()._parse_create() 936 937 if isinstance(create, exp.Create): 938 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 939 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 940 if not create.args.get("properties"): 941 create.set("properties", exp.Properties(expressions=[])) 942 943 create.args["properties"].append("expressions", exp.TemporaryProperty()) 944 945 return create 946 947 def _parse_if(self) -> exp.IfBlock: 948 this = self._parse_condition() 949 true = self._parse_block() 950 951 false = self._match(TokenType.ELSE) and self._parse_block() 952 953 return self.expression(exp.IfBlock, this=this, true=true, false=false) 954 955 def _parse_unique(self) -> exp.UniqueColumnConstraint: 956 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 957 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 958 else: 959 this = self._parse_schema(self._parse_id_var(any_token=False)) 960 961 return self.expression(exp.UniqueColumnConstraint, this=this) 962 963 def _parse_update(self) -> exp.Update: 964 expression = super()._parse_update() 965 expression.set("options", self._parse_options()) 966 return expression 967 968 def _parse_partition(self) -> t.Optional[exp.Partition]: 969 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 970 return None 971 972 def parse_range(): 973 low = self._parse_bitwise() 974 high = self._parse_bitwise() if self._match_text_seq("TO") else None 975 976 return ( 977 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 978 ) 979 980 partition = self.expression( 981 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 982 ) 983 984 self._match_r_paren() 985 986 return partition 987 988 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 989 expression = super()._parse_alter_table_alter() 990 991 if expression is not None: 992 collation = expression.args.get("collate") 993 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 994 identifier = collation.this 995 collation.set("this", exp.Var(this=identifier.name)) 996 997 return expression 998 999 def _parse_primary_key_part(self) -> t.Optional[exp.Expression]: 1000 return self._parse_ordered() 1001 1002 class Generator(generator.Generator): 1003 LIMIT_IS_TOP = True 1004 QUERY_HINTS = False 1005 RETURNING_END = False 1006 NVL2_SUPPORTED = False 1007 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 1008 LIMIT_FETCH = "FETCH" 1009 COMPUTED_COLUMN_WITH_TYPE = False 1010 CTE_RECURSIVE_KEYWORD_REQUIRED = False 1011 ENSURE_BOOLS = True 1012 NULL_ORDERING_SUPPORTED = None 1013 SUPPORTS_SINGLE_ARG_CONCAT = False 1014 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 1015 SUPPORTS_SELECT_INTO = True 1016 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 1017 SUPPORTS_TO_NUMBER = False 1018 SET_OP_MODIFIERS = False 1019 COPY_PARAMS_EQ_REQUIRED = True 1020 PARSE_JSON_NAME = None 1021 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 1022 ALTER_SET_WRAPPED = True 1023 ALTER_SET_TYPE = "" 1024 1025 EXPRESSIONS_WITHOUT_NESTED_CTES = { 1026 exp.Create, 1027 exp.Delete, 1028 exp.Insert, 1029 exp.Intersect, 1030 exp.Except, 1031 exp.Merge, 1032 exp.Select, 1033 exp.Subquery, 1034 exp.Union, 1035 exp.Update, 1036 } 1037 1038 SUPPORTED_JSON_PATH_PARTS = { 1039 exp.JSONPathKey, 1040 exp.JSONPathRoot, 1041 exp.JSONPathSubscript, 1042 } 1043 1044 TYPE_MAPPING = { 1045 **generator.Generator.TYPE_MAPPING, 1046 exp.DataType.Type.BOOLEAN: "BIT", 1047 exp.DataType.Type.DATETIME2: "DATETIME2", 1048 exp.DataType.Type.DECIMAL: "NUMERIC", 1049 exp.DataType.Type.DOUBLE: "FLOAT", 1050 exp.DataType.Type.INT: "INTEGER", 1051 exp.DataType.Type.ROWVERSION: "ROWVERSION", 1052 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 1053 exp.DataType.Type.TIMESTAMP: "DATETIME2", 1054 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 1055 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 1056 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 1057 exp.DataType.Type.UTINYINT: "TINYINT", 1058 exp.DataType.Type.VARIANT: "SQL_VARIANT", 1059 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 1060 } 1061 1062 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 1063 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 1064 1065 TRANSFORMS = { 1066 **generator.Generator.TRANSFORMS, 1067 exp.AnyValue: any_value_to_max_sql, 1068 exp.Atan2: rename_func("ATN2"), 1069 exp.ArrayToString: rename_func("STRING_AGG"), 1070 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 1071 exp.Ceil: rename_func("CEILING"), 1072 exp.Chr: rename_func("CHAR"), 1073 exp.DateAdd: date_delta_sql("DATEADD"), 1074 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 1075 exp.CurrentDate: rename_func("GETDATE"), 1076 exp.CurrentTimestamp: rename_func("GETDATE"), 1077 exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"), 1078 exp.DateStrToDate: datestrtodate_sql, 1079 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 1080 exp.GroupConcat: _string_agg_sql, 1081 exp.If: rename_func("IIF"), 1082 exp.JSONExtract: _json_extract_sql, 1083 exp.JSONExtractScalar: _json_extract_sql, 1084 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1085 exp.Ln: rename_func("LOG"), 1086 exp.Max: max_or_greatest, 1087 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 1088 exp.Min: min_or_least, 1089 exp.NumberToStr: _format_sql, 1090 exp.Repeat: rename_func("REPLICATE"), 1091 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 1092 exp.Select: transforms.preprocess( 1093 [ 1094 transforms.eliminate_distinct_on, 1095 transforms.eliminate_semi_and_anti_joins, 1096 transforms.eliminate_qualify, 1097 transforms.unnest_generate_date_array_using_recursive_cte, 1098 ] 1099 ), 1100 exp.Stddev: rename_func("STDEV"), 1101 exp.StrPosition: lambda self, e: strposition_sql( 1102 self, e, func_name="CHARINDEX", supports_position=True 1103 ), 1104 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 1105 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 1106 exp.SHA1Digest: lambda self, e: self.func( 1107 "HASHBYTES", exp.Literal.string("SHA1"), e.this 1108 ), 1109 exp.SHA2: lambda self, e: self.func( 1110 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 1111 ), 1112 exp.TemporaryProperty: lambda self, e: "", 1113 exp.TimeStrToTime: _timestrtotime_sql, 1114 exp.TimeToStr: _format_sql, 1115 exp.Trim: trim_sql, 1116 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 1117 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 1118 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 1119 exp.Trunc: lambda self, e: self.func( 1120 "ROUND", 1121 e.this, 1122 e.args.get("decimals") or exp.Literal.number(0), 1123 exp.Literal.number(1), 1124 ), 1125 exp.Uuid: lambda *_: "NEWID()", 1126 exp.DateFromParts: rename_func("DATEFROMPARTS"), 1127 } 1128 1129 TRANSFORMS.pop(exp.ReturnsProperty) 1130 1131 PROPERTIES_LOCATION = { 1132 **generator.Generator.PROPERTIES_LOCATION, 1133 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 1134 } 1135 1136 def scope_resolution(self, rhs: str, scope_name: str) -> str: 1137 return f"{scope_name}::{rhs}" 1138 1139 def select_sql(self, expression: exp.Select) -> str: 1140 limit = expression.args.get("limit") 1141 offset = expression.args.get("offset") 1142 1143 if isinstance(limit, exp.Fetch) and not offset: 1144 # Dialects like Oracle can FETCH directly from a row set but 1145 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1146 offset = exp.Offset(expression=exp.Literal.number(0)) 1147 expression.set("offset", offset) 1148 1149 if offset: 1150 if not expression.args.get("order"): 1151 # ORDER BY is required in order to use OFFSET in a query, so we use 1152 # a noop order by, since we don't really care about the order. 1153 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1154 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1155 1156 if isinstance(limit, exp.Limit): 1157 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1158 # we replace here because otherwise TOP would be generated in select_sql 1159 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1160 1161 return super().select_sql(expression) 1162 1163 def convert_sql(self, expression: exp.Convert) -> str: 1164 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1165 return self.func( 1166 name, expression.this, expression.expression, expression.args.get("style") 1167 ) 1168 1169 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1170 option = self.sql(expression, "this") 1171 value = self.sql(expression, "expression") 1172 if value: 1173 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1174 return f"{option} {optional_equal_sign}{value}" 1175 return option 1176 1177 def lateral_op(self, expression: exp.Lateral) -> str: 1178 cross_apply = expression.args.get("cross_apply") 1179 if cross_apply is True: 1180 return "CROSS APPLY" 1181 if cross_apply is False: 1182 return "OUTER APPLY" 1183 1184 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1185 self.unsupported("LATERAL clause is not supported.") 1186 return "LATERAL" 1187 1188 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1189 this = expression.this 1190 split_count = len(this.name.split(".")) 1191 delimiter = expression.args.get("delimiter") 1192 part_index = expression.args.get("part_index") 1193 1194 if ( 1195 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1196 or (delimiter and delimiter.name != ".") 1197 or not part_index 1198 or split_count > 4 1199 ): 1200 self.unsupported( 1201 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1202 ) 1203 return "" 1204 1205 return self.func( 1206 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1207 ) 1208 1209 def extract_sql(self, expression: exp.Extract) -> str: 1210 part = expression.this 1211 name = DATE_PART_UNMAPPING.get(part.name.upper()) or part 1212 1213 return self.func("DATEPART", name, expression.expression) 1214 1215 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1216 nano = expression.args.get("nano") 1217 if nano is not None: 1218 nano.pop() 1219 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1220 1221 if expression.args.get("fractions") is None: 1222 expression.set("fractions", exp.Literal.number(0)) 1223 if expression.args.get("precision") is None: 1224 expression.set("precision", exp.Literal.number(0)) 1225 1226 return rename_func("TIMEFROMPARTS")(self, expression) 1227 1228 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1229 zone = expression.args.get("zone") 1230 if zone is not None: 1231 zone.pop() 1232 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1233 1234 nano = expression.args.get("nano") 1235 if nano is not None: 1236 nano.pop() 1237 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1238 1239 if expression.args.get("milli") is None: 1240 expression.set("milli", exp.Literal.number(0)) 1241 1242 return rename_func("DATETIMEFROMPARTS")(self, expression) 1243 1244 def setitem_sql(self, expression: exp.SetItem) -> str: 1245 this = expression.this 1246 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1247 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1248 return f"{self.sql(this.left)} {self.sql(this.right)}" 1249 1250 return super().setitem_sql(expression) 1251 1252 def boolean_sql(self, expression: exp.Boolean) -> str: 1253 if type(expression.parent) in BIT_TYPES or isinstance( 1254 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1255 ): 1256 return "1" if expression.this else "0" 1257 1258 return "(1 = 1)" if expression.this else "(1 = 0)" 1259 1260 def is_sql(self, expression: exp.Is) -> str: 1261 if isinstance(expression.expression, exp.Boolean): 1262 return self.binary(expression, "=") 1263 return self.binary(expression, "IS") 1264 1265 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1266 sql = self.sql(expression, "this") 1267 properties = expression.args.get("properties") 1268 1269 if sql[:1] != "#" and any( 1270 isinstance(prop, exp.TemporaryProperty) 1271 for prop in (properties.expressions if properties else []) 1272 ): 1273 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1274 1275 return sql 1276 1277 def create_sql(self, expression: exp.Create) -> str: 1278 kind = expression.kind 1279 exists = expression.args.get("exists") 1280 expression.set("exists", None) 1281 1282 like_property = expression.find(exp.LikeProperty) 1283 if like_property: 1284 ctas_expression = like_property.this 1285 else: 1286 ctas_expression = expression.expression 1287 1288 if kind == "VIEW": 1289 expression.this.set("catalog", None) 1290 with_ = expression.args.get("with_") 1291 if ctas_expression and with_: 1292 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1293 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1294 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1295 ctas_expression.set("with_", with_.pop()) 1296 1297 table = expression.find(exp.Table) 1298 1299 # Convert CTAS statement to SELECT .. INTO .. 1300 if kind == "TABLE" and ctas_expression: 1301 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1302 ctas_expression = ctas_expression.subquery() 1303 1304 properties = expression.args.get("properties") or exp.Properties() 1305 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1306 1307 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1308 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1309 1310 if like_property: 1311 select_into.limit(0, copy=False) 1312 1313 sql = self.sql(select_into) 1314 else: 1315 sql = super().create_sql(expression) 1316 1317 if exists: 1318 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1319 sql_with_ctes = self.prepend_ctes(expression, sql) 1320 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1321 if kind == "SCHEMA": 1322 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1323 elif kind == "TABLE": 1324 assert table 1325 where = exp.and_( 1326 exp.column("TABLE_NAME").eq(table.name), 1327 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1328 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1329 ) 1330 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1331 elif kind == "INDEX": 1332 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1333 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1334 elif expression.args.get("replace"): 1335 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1336 1337 return self.prepend_ctes(expression, sql) 1338 1339 @generator.unsupported_args("unlogged", "expressions") 1340 def into_sql(self, expression: exp.Into) -> str: 1341 if expression.args.get("temporary"): 1342 # If the Into expression has a temporary property, push this down to the Identifier 1343 table = expression.find(exp.Table) 1344 if table and isinstance(table.this, exp.Identifier): 1345 table.this.set("temporary", True) 1346 1347 return f"{self.seg('INTO')} {self.sql(expression, 'this')}" 1348 1349 def count_sql(self, expression: exp.Count) -> str: 1350 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1351 return rename_func(func_name)(self, expression) 1352 1353 def datediff_sql(self, expression: exp.DateDiff) -> str: 1354 func_name = "DATEDIFF_BIG" if expression.args.get("big_int") else "DATEDIFF" 1355 return date_delta_sql(func_name)(self, expression) 1356 1357 def offset_sql(self, expression: exp.Offset) -> str: 1358 return f"{super().offset_sql(expression)} ROWS" 1359 1360 def version_sql(self, expression: exp.Version) -> str: 1361 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1362 this = f"FOR {name}" 1363 expr = expression.expression 1364 kind = expression.text("kind") 1365 if kind in ("FROM", "BETWEEN"): 1366 args = expr.expressions 1367 sep = "TO" if kind == "FROM" else "AND" 1368 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1369 else: 1370 expr_sql = self.sql(expr) 1371 1372 expr_sql = f" {expr_sql}" if expr_sql else "" 1373 return f"{this} {kind}{expr_sql}" 1374 1375 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1376 table = expression.args.get("table") 1377 table = f"{table} " if table else "" 1378 return f"RETURNS {table}{self.sql(expression, 'this')}" 1379 1380 def returning_sql(self, expression: exp.Returning) -> str: 1381 into = self.sql(expression, "into") 1382 into = self.seg(f"INTO {into}") if into else "" 1383 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1384 1385 def transaction_sql(self, expression: exp.Transaction) -> str: 1386 this = self.sql(expression, "this") 1387 this = f" {this}" if this else "" 1388 mark = self.sql(expression, "mark") 1389 mark = f" WITH MARK {mark}" if mark else "" 1390 return f"BEGIN TRANSACTION{this}{mark}" 1391 1392 def commit_sql(self, expression: exp.Commit) -> str: 1393 this = self.sql(expression, "this") 1394 this = f" {this}" if this else "" 1395 durability = expression.args.get("durability") 1396 durability = ( 1397 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1398 if durability is not None 1399 else "" 1400 ) 1401 return f"COMMIT TRANSACTION{this}{durability}" 1402 1403 def rollback_sql(self, expression: exp.Rollback) -> str: 1404 this = self.sql(expression, "this") 1405 this = f" {this}" if this else "" 1406 return f"ROLLBACK TRANSACTION{this}" 1407 1408 def identifier_sql(self, expression: exp.Identifier) -> str: 1409 identifier = super().identifier_sql(expression) 1410 1411 if expression.args.get("global_"): 1412 identifier = f"##{identifier}" 1413 elif expression.args.get("temporary"): 1414 identifier = f"#{identifier}" 1415 1416 return identifier 1417 1418 def constraint_sql(self, expression: exp.Constraint) -> str: 1419 this = self.sql(expression, "this") 1420 expressions = self.expressions(expression, flat=True, sep=" ") 1421 return f"CONSTRAINT {this} {expressions}" 1422 1423 def length_sql(self, expression: exp.Length) -> str: 1424 return self._uncast_text(expression, "LEN") 1425 1426 def right_sql(self, expression: exp.Right) -> str: 1427 return self._uncast_text(expression, "RIGHT") 1428 1429 def left_sql(self, expression: exp.Left) -> str: 1430 return self._uncast_text(expression, "LEFT") 1431 1432 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1433 this = expression.this 1434 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1435 this_sql = self.sql(this, "this") 1436 else: 1437 this_sql = self.sql(this) 1438 expression_sql = self.sql(expression, "expression") 1439 return self.func(name, this_sql, expression_sql if expression_sql else None) 1440 1441 def partition_sql(self, expression: exp.Partition) -> str: 1442 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1443 1444 def alter_sql(self, expression: exp.Alter) -> str: 1445 action = seq_get(expression.args.get("actions") or [], 0) 1446 if isinstance(action, exp.AlterRename): 1447 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1448 return super().alter_sql(expression) 1449 1450 def drop_sql(self, expression: exp.Drop) -> str: 1451 if expression.args["kind"] == "VIEW": 1452 expression.this.set("catalog", None) 1453 return super().drop_sql(expression) 1454 1455 def options_modifier(self, expression: exp.Expression) -> str: 1456 options = self.expressions(expression, key="options") 1457 return f" OPTION{self.wrap(options)}" if options else "" 1458 1459 def dpipe_sql(self, expression: exp.DPipe) -> str: 1460 return self.sql( 1461 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1462 ) 1463 1464 def isascii_sql(self, expression: exp.IsAscii) -> str: 1465 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1466 1467 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1468 this = super().columndef_sql(expression, sep) 1469 default = self.sql(expression, "default") 1470 default = f" = {default}" if default else "" 1471 output = self.sql(expression, "output") 1472 output = f" {output}" if output else "" 1473 return f"{this}{default}{output}" 1474 1475 def coalesce_sql(self, expression: exp.Coalesce) -> str: 1476 func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE" 1477 return rename_func(func_name)(self, expression) 1478 1479 def storedprocedure_sql(self, expression: exp.StoredProcedure) -> str: 1480 this = self.sql(expression, "this") 1481 expressions = self.expressions(expression) 1482 expressions = ( 1483 self.wrap(expressions) if expression.args.get("wrapped") else f" {expressions}" 1484 ) 1485 return f"{this}{expressions}" if expressions.strip() != "" else this 1486 1487 def ifblock_sql(self, expression: exp.IfBlock) -> str: 1488 this = self.sql(expression, "this") 1489 true = self.sql(expression, "true") 1490 true = f" {true}" if true else " " 1491 false = self.sql(expression, "false") 1492 false = f"; ELSE BEGIN {false}" if false else "" 1493 return f"IF {this} BEGIN{true}{false}" 1494 1495 def whileblock_sql(self, expression: exp.WhileBlock) -> str: 1496 this = self.sql(expression, "this") 1497 body = self.sql(expression, "body") 1498 body = f" {body}" if body else " " 1499 return f"WHILE {this} BEGIN{body}" 1500 1501 def execute_sql(self, expression: exp.Execute) -> str: 1502 this = self.sql(expression, "this") 1503 expressions = self.expressions(expression) 1504 expressions = f" {expressions}" if expressions else "" 1505 return f"EXECUTE {this}{expressions}" 1506 1507 def executesql_sql(self, expression: exp.ExecuteSql) -> str: 1508 return self.execute_sql(expression)
Whether the base comes first in the LOG function.
Possible values: True, False, None (two arguments are not supported by LOG)
Whether the behavior of a / b depends on the types of a and b.
False means a / b is always float division.
True means a / b is integer division if both a and b are integers.
A NULL arg in CONCAT yields NULL by default, but in some dialects it yields an empty string.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python strftime formats.
544 class Tokenizer(tokens.Tokenizer): 545 IDENTIFIERS = [("[", "]"), '"'] 546 QUOTES = ["'", '"'] 547 HEX_STRINGS = [("0x", ""), ("0X", "")] 548 VAR_SINGLE_TOKENS = {"@", "$", "#"} 549 550 KEYWORDS = { 551 **tokens.Tokenizer.KEYWORDS, 552 "CLUSTERED INDEX": TokenType.INDEX, 553 "DATETIME2": TokenType.DATETIME2, 554 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 555 "DECLARE": TokenType.DECLARE, 556 "EXEC": TokenType.EXECUTE, 557 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 558 "GO": TokenType.COMMAND, 559 "IMAGE": TokenType.IMAGE, 560 "MONEY": TokenType.MONEY, 561 "NONCLUSTERED INDEX": TokenType.INDEX, 562 "NTEXT": TokenType.TEXT, 563 "OPTION": TokenType.OPTION, 564 "OUTPUT": TokenType.RETURNING, 565 "PRINT": TokenType.COMMAND, 566 "PROC": TokenType.PROCEDURE, 567 "REAL": TokenType.FLOAT, 568 "ROWVERSION": TokenType.ROWVERSION, 569 "SMALLDATETIME": TokenType.SMALLDATETIME, 570 "SMALLMONEY": TokenType.SMALLMONEY, 571 "SQL_VARIANT": TokenType.VARIANT, 572 "SYSTEM_USER": TokenType.CURRENT_USER, 573 "TOP": TokenType.TOP, 574 "TIMESTAMP": TokenType.ROWVERSION, 575 "TINYINT": TokenType.UTINYINT, 576 "UNIQUEIDENTIFIER": TokenType.UUID, 577 "UPDATE STATISTICS": TokenType.COMMAND, 578 "XML": TokenType.XML, 579 } 580 KEYWORDS.pop("/*+") 581 582 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} - {TokenType.EXECUTE}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- ESCAPE_FOLLOW_CHARS
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- tokenize
- sql
- size
- tokens
584 class Parser(parser.Parser): 585 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 586 LOG_DEFAULTS_TO_LN = True 587 STRING_ALIASES = True 588 NO_PAREN_IF_COMMANDS = False 589 590 QUERY_MODIFIER_PARSERS = { 591 **parser.Parser.QUERY_MODIFIER_PARSERS, 592 TokenType.OPTION: lambda self: ("options", self._parse_options()), 593 TokenType.FOR: lambda self: ("for_", self._parse_for()), 594 } 595 596 # T-SQL does not allow BEGIN to be used as an identifier 597 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 598 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 599 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 600 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 601 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 602 603 FUNCTIONS = { 604 **parser.Parser.FUNCTIONS, 605 "ATN2": exp.Atan2.from_arg_list, 606 "CHARINDEX": lambda args: exp.StrPosition( 607 this=seq_get(args, 1), 608 substr=seq_get(args, 0), 609 position=seq_get(args, 2), 610 ), 611 "COUNT": lambda args: exp.Count( 612 this=seq_get(args, 0), expressions=args[1:], big_int=False 613 ), 614 "COUNT_BIG": lambda args: exp.Count( 615 this=seq_get(args, 0), expressions=args[1:], big_int=True 616 ), 617 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 618 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 619 "DATEDIFF_BIG": _build_date_delta( 620 exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL, big_int=True 621 ), 622 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 623 "DATETIMEFROMPARTS": _build_datetimefromparts, 624 "EOMONTH": _build_eomonth, 625 "FORMAT": _build_format, 626 "GETDATE": exp.CurrentTimestamp.from_arg_list, 627 "HASHBYTES": _build_hashbytes, 628 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 629 "JSON_QUERY": _build_json_query, 630 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 631 "LEN": _build_with_arg_as_text(exp.Length), 632 "LEFT": _build_with_arg_as_text(exp.Left), 633 "NEWID": exp.Uuid.from_arg_list, 634 "RIGHT": _build_with_arg_as_text(exp.Right), 635 "PARSENAME": _build_parsename, 636 "REPLICATE": exp.Repeat.from_arg_list, 637 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 638 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 639 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 640 "SUSER_NAME": exp.CurrentUser.from_arg_list, 641 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 642 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 643 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 644 "TIMEFROMPARTS": _build_timefromparts, 645 "DATETRUNC": _build_datetrunc, 646 } 647 648 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 649 650 PROCEDURE_OPTIONS = dict.fromkeys( 651 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 652 ) 653 654 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 655 656 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 657 TokenType.TABLE, 658 *parser.Parser.TYPE_TOKENS, 659 } 660 661 STATEMENT_PARSERS = { 662 **parser.Parser.STATEMENT_PARSERS, 663 TokenType.DECLARE: lambda self: self._parse_declare(), 664 TokenType.EXECUTE: lambda self: self._parse_execute(), 665 } 666 667 RANGE_PARSERS = { 668 **parser.Parser.RANGE_PARSERS, 669 TokenType.DCOLON: lambda self, this: self.expression( 670 exp.ScopeResolution, 671 this=this, 672 expression=self._parse_function() or self._parse_var(any_token=True), 673 ), 674 } 675 676 NO_PAREN_FUNCTION_PARSERS = { 677 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 678 "NEXT": lambda self: self._parse_next_value_for(), 679 } 680 681 FUNCTION_PARSERS: t.Dict[str, t.Callable] = { 682 **parser.Parser.FUNCTION_PARSERS, 683 "JSON_ARRAYAGG": lambda self: self.expression( 684 exp.JSONArrayAgg, 685 this=self._parse_bitwise(), 686 order=self._parse_order(), 687 null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"), 688 ), 689 "DATEPART": lambda self: self._parse_datepart(), 690 } 691 692 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 693 COLUMN_OPERATORS = { 694 **parser.Parser.COLUMN_OPERATORS, 695 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 696 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 697 else self.expression(exp.ScopeResolution, this=this, expression=to), 698 } 699 700 SET_OP_MODIFIERS = {"offset"} 701 702 ODBC_DATETIME_LITERALS = { 703 "d": exp.Date, 704 "t": exp.Time, 705 "ts": exp.Timestamp, 706 } 707 708 def _parse_execute(self) -> exp.Execute: 709 execute = self.expression( 710 exp.Execute, 711 this=self._parse_table(schema=True), 712 expressions=self._parse_csv(self._parse_expression), 713 ) 714 715 if execute.name.lower() == "sp_executesql": 716 execute = self.expression(exp.ExecuteSql, **execute.args) 717 718 return execute 719 720 def _parse_datepart(self) -> exp.Extract: 721 this = self._parse_var(tokens=[TokenType.IDENTIFIER]) 722 expression = self._match(TokenType.COMMA) and self._parse_bitwise() 723 name = map_date_part(this, self.dialect) 724 725 return self.expression(exp.Extract, this=name, expression=expression) 726 727 def _parse_alter_table_set(self) -> exp.AlterSet: 728 return self._parse_wrapped(super()._parse_alter_table_set) 729 730 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 731 if self._match(TokenType.MERGE): 732 comments = self._prev_comments 733 merge = self._parse_merge() 734 merge.add_comments(comments, prepend=True) 735 return merge 736 737 return super()._parse_wrapped_select(table=table) 738 739 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 740 # We want to use _parse_types() if the first token after :: is a known type, 741 # otherwise we could parse something like x::varchar(max) into a function 742 if self._match_set(self.TYPE_TOKENS, advance=False): 743 return self._parse_types() 744 745 return self._parse_function() or self._parse_types() 746 747 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 748 if not self._match(TokenType.OPTION): 749 return None 750 751 def _parse_option() -> t.Optional[exp.Expression]: 752 option = self._parse_var_from_options(OPTIONS) 753 if not option: 754 return None 755 756 self._match(TokenType.EQ) 757 return self.expression( 758 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 759 ) 760 761 return self._parse_wrapped_csv(_parse_option) 762 763 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 764 this = self._parse_primary_or_var() 765 if self._match(TokenType.L_PAREN, advance=False): 766 expression = self._parse_wrapped(self._parse_string) 767 else: 768 expression = None 769 770 return exp.XMLKeyValueOption(this=this, expression=expression) 771 772 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 773 if not self._match_pair(TokenType.FOR, TokenType.XML): 774 return None 775 776 def _parse_for_xml() -> t.Optional[exp.Expression]: 777 return self.expression( 778 exp.QueryOption, 779 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 780 or self._parse_xml_key_value_option(), 781 ) 782 783 return self._parse_csv(_parse_for_xml) 784 785 def _parse_projections( 786 self, 787 ) -> t.Tuple[t.List[exp.Expression], t.Optional[t.List[exp.Expression]]]: 788 """ 789 T-SQL supports the syntax alias = expression in the SELECT's projection list, 790 so we transform all parsed Selects to convert their EQ projections into Aliases. 791 792 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 793 """ 794 projections, _ = super()._parse_projections() 795 return [ 796 ( 797 exp.alias_(projection.expression, projection.this.this, copy=False) 798 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 799 else projection 800 ) 801 for projection in projections 802 ], None 803 804 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 805 """Applies to SQL Server and Azure SQL Database 806 COMMIT [ { TRAN | TRANSACTION } 807 [ transaction_name | @tran_name_variable ] ] 808 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 809 810 ROLLBACK { TRAN | TRANSACTION } 811 [ transaction_name | @tran_name_variable 812 | savepoint_name | @savepoint_variable ] 813 """ 814 rollback = self._prev.token_type == TokenType.ROLLBACK 815 816 self._match_texts(("TRAN", "TRANSACTION")) 817 this = self._parse_id_var() 818 819 if rollback: 820 return self.expression(exp.Rollback, this=this) 821 822 durability = None 823 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 824 self._match_text_seq("DELAYED_DURABILITY") 825 self._match(TokenType.EQ) 826 827 if self._match_text_seq("OFF"): 828 durability = False 829 else: 830 self._match(TokenType.ON) 831 durability = True 832 833 self._match_r_paren() 834 835 return self.expression(exp.Commit, this=this, durability=durability) 836 837 def _parse_transaction(self) -> exp.Transaction | exp.Command: 838 """Applies to SQL Server and Azure SQL Database 839 BEGIN { TRAN | TRANSACTION } 840 [ { transaction_name | @tran_name_variable } 841 [ WITH MARK [ 'description' ] ] 842 ] 843 """ 844 if self._match_texts(("TRAN", "TRANSACTION")): 845 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 846 if self._match_text_seq("WITH", "MARK"): 847 transaction.set("mark", self._parse_string()) 848 849 return transaction 850 851 return self._parse_as_command(self._prev) 852 853 def _parse_returns(self) -> exp.ReturnsProperty: 854 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 855 returns = super()._parse_returns() 856 returns.set("table", table) 857 return returns 858 859 def _parse_convert( 860 self, strict: bool, safe: t.Optional[bool] = None 861 ) -> t.Optional[exp.Expression]: 862 this = self._parse_types() 863 self._match(TokenType.COMMA) 864 args = [this, *self._parse_csv(self._parse_assignment)] 865 convert = exp.Convert.from_arg_list(args) 866 convert.set("safe", safe) 867 return convert 868 869 def _parse_column_def( 870 self, this: t.Optional[exp.Expression], computed_column: bool = True 871 ) -> t.Optional[exp.Expression]: 872 this = super()._parse_column_def(this=this, computed_column=computed_column) 873 if not this: 874 return None 875 if self._match(TokenType.EQ): 876 this.set("default", self._parse_disjunction()) 877 if self._match_texts(self.COLUMN_DEFINITION_MODES): 878 this.set("output", self._prev.text) 879 return this 880 881 def _parse_user_defined_function( 882 self, kind: t.Optional[TokenType] = None 883 ) -> t.Optional[exp.Expression]: 884 this = super()._parse_user_defined_function(kind=kind) 885 886 if kind == TokenType.FUNCTION or isinstance(this, exp.UserDefinedFunction): 887 return this 888 889 if kind == TokenType.PROCEDURE and this: 890 expressions = this.expressions 891 if not ( 892 expressions or self._match_set((TokenType.ALIAS, TokenType.WITH), advance=False) 893 ): 894 expressions = self._parse_csv(self._parse_function_parameter) 895 896 return self.expression( 897 exp.StoredProcedure, 898 this=this if isinstance(this, exp.Table) else this.this, 899 expressions=expressions, 900 wrapped=this.args.get("wrapped"), 901 ) 902 903 return self.expression(exp.UserDefinedFunction, this=this) 904 905 def _parse_into(self) -> t.Optional[exp.Into]: 906 into = super()._parse_into() 907 908 table = isinstance(into, exp.Into) and into.find(exp.Table) 909 if isinstance(table, exp.Table): 910 table_identifier = table.this 911 if table_identifier.args.get("temporary"): 912 # Promote the temporary property from the Identifier to the Into expression 913 t.cast(exp.Into, into).set("temporary", True) 914 915 return into 916 917 def _parse_id_var( 918 self, 919 any_token: bool = True, 920 tokens: t.Optional[t.Collection[TokenType]] = None, 921 ) -> t.Optional[exp.Expression]: 922 is_temporary = self._match(TokenType.HASH) 923 is_global = is_temporary and self._match(TokenType.HASH) 924 925 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 926 if this: 927 if is_global: 928 this.set("global_", True) 929 elif is_temporary: 930 this.set("temporary", True) 931 932 return this 933 934 def _parse_create(self) -> exp.Create | exp.Command: 935 create = super()._parse_create() 936 937 if isinstance(create, exp.Create): 938 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 939 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 940 if not create.args.get("properties"): 941 create.set("properties", exp.Properties(expressions=[])) 942 943 create.args["properties"].append("expressions", exp.TemporaryProperty()) 944 945 return create 946 947 def _parse_if(self) -> exp.IfBlock: 948 this = self._parse_condition() 949 true = self._parse_block() 950 951 false = self._match(TokenType.ELSE) and self._parse_block() 952 953 return self.expression(exp.IfBlock, this=this, true=true, false=false) 954 955 def _parse_unique(self) -> exp.UniqueColumnConstraint: 956 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 957 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 958 else: 959 this = self._parse_schema(self._parse_id_var(any_token=False)) 960 961 return self.expression(exp.UniqueColumnConstraint, this=this) 962 963 def _parse_update(self) -> exp.Update: 964 expression = super()._parse_update() 965 expression.set("options", self._parse_options()) 966 return expression 967 968 def _parse_partition(self) -> t.Optional[exp.Partition]: 969 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 970 return None 971 972 def parse_range(): 973 low = self._parse_bitwise() 974 high = self._parse_bitwise() if self._match_text_seq("TO") else None 975 976 return ( 977 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 978 ) 979 980 partition = self.expression( 981 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 982 ) 983 984 self._match_r_paren() 985 986 return partition 987 988 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 989 expression = super()._parse_alter_table_alter() 990 991 if expression is not None: 992 collation = expression.args.get("collate") 993 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 994 identifier = collation.this 995 collation.set("this", exp.Var(this=identifier.name)) 996 997 return expression 998 999 def _parse_primary_key_part(self) -> t.Optional[exp.Expression]: 1000 return self._parse_ordered()
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- TRIGGER_EVENTS
- ALTERABLES
- COLON_PLACEHOLDER_TOKENS
- ARRAY_CONSTRUCTORS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- CAST_COLUMN_OPERATORS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PIPE_SYNTAX_TRANSFORM_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- QUERY_MODIFIER_TOKENS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- TRIGGER_TIMING
- TRIGGER_DEFERRABLE
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- WINDOW_EXCLUDE_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- SET_ASSIGNMENT_DELIMITERS
- ANALYZE_STYLES
- ANALYZE_EXPRESSION_PARSERS
- PARTITION_KEYWORDS
- AMBIGUOUS_ALIAS_TOKENS
- OPERATION_MODIFIERS
- RECURSIVE_CTE_SEARCH_KIND
- MODIFIABLES
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- OPTIONAL_ALIAS_TOKEN_CTE
- ALTER_RENAME_REQUIRES_COLUMN
- ALTER_TABLE_PARTITIONS
- JOINS_HAVE_EQUAL_PRECEDENCE
- ZONE_AWARE_TIMESTAMP_CONSTRUCTOR
- MAP_KEYS_ARE_ARBITRARY_EXPRESSIONS
- JSON_EXTRACT_REQUIRES_JSON_EXPRESSION
- ADD_JOIN_ON_TRUE
- SUPPORTS_OMITTED_INTERVAL_SPAN_UNIT
- raise_error
- validate_expression
- reset
- errors
- error_level
- error_message_context
- max_errors
- dialect
- sql
- parse
- parse_into
- check_errors
- expression
- parse_set_operation
- build_cast
1002 class Generator(generator.Generator): 1003 LIMIT_IS_TOP = True 1004 QUERY_HINTS = False 1005 RETURNING_END = False 1006 NVL2_SUPPORTED = False 1007 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 1008 LIMIT_FETCH = "FETCH" 1009 COMPUTED_COLUMN_WITH_TYPE = False 1010 CTE_RECURSIVE_KEYWORD_REQUIRED = False 1011 ENSURE_BOOLS = True 1012 NULL_ORDERING_SUPPORTED = None 1013 SUPPORTS_SINGLE_ARG_CONCAT = False 1014 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 1015 SUPPORTS_SELECT_INTO = True 1016 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 1017 SUPPORTS_TO_NUMBER = False 1018 SET_OP_MODIFIERS = False 1019 COPY_PARAMS_EQ_REQUIRED = True 1020 PARSE_JSON_NAME = None 1021 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 1022 ALTER_SET_WRAPPED = True 1023 ALTER_SET_TYPE = "" 1024 1025 EXPRESSIONS_WITHOUT_NESTED_CTES = { 1026 exp.Create, 1027 exp.Delete, 1028 exp.Insert, 1029 exp.Intersect, 1030 exp.Except, 1031 exp.Merge, 1032 exp.Select, 1033 exp.Subquery, 1034 exp.Union, 1035 exp.Update, 1036 } 1037 1038 SUPPORTED_JSON_PATH_PARTS = { 1039 exp.JSONPathKey, 1040 exp.JSONPathRoot, 1041 exp.JSONPathSubscript, 1042 } 1043 1044 TYPE_MAPPING = { 1045 **generator.Generator.TYPE_MAPPING, 1046 exp.DataType.Type.BOOLEAN: "BIT", 1047 exp.DataType.Type.DATETIME2: "DATETIME2", 1048 exp.DataType.Type.DECIMAL: "NUMERIC", 1049 exp.DataType.Type.DOUBLE: "FLOAT", 1050 exp.DataType.Type.INT: "INTEGER", 1051 exp.DataType.Type.ROWVERSION: "ROWVERSION", 1052 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 1053 exp.DataType.Type.TIMESTAMP: "DATETIME2", 1054 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 1055 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 1056 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 1057 exp.DataType.Type.UTINYINT: "TINYINT", 1058 exp.DataType.Type.VARIANT: "SQL_VARIANT", 1059 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 1060 } 1061 1062 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 1063 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 1064 1065 TRANSFORMS = { 1066 **generator.Generator.TRANSFORMS, 1067 exp.AnyValue: any_value_to_max_sql, 1068 exp.Atan2: rename_func("ATN2"), 1069 exp.ArrayToString: rename_func("STRING_AGG"), 1070 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 1071 exp.Ceil: rename_func("CEILING"), 1072 exp.Chr: rename_func("CHAR"), 1073 exp.DateAdd: date_delta_sql("DATEADD"), 1074 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 1075 exp.CurrentDate: rename_func("GETDATE"), 1076 exp.CurrentTimestamp: rename_func("GETDATE"), 1077 exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"), 1078 exp.DateStrToDate: datestrtodate_sql, 1079 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 1080 exp.GroupConcat: _string_agg_sql, 1081 exp.If: rename_func("IIF"), 1082 exp.JSONExtract: _json_extract_sql, 1083 exp.JSONExtractScalar: _json_extract_sql, 1084 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1085 exp.Ln: rename_func("LOG"), 1086 exp.Max: max_or_greatest, 1087 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 1088 exp.Min: min_or_least, 1089 exp.NumberToStr: _format_sql, 1090 exp.Repeat: rename_func("REPLICATE"), 1091 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 1092 exp.Select: transforms.preprocess( 1093 [ 1094 transforms.eliminate_distinct_on, 1095 transforms.eliminate_semi_and_anti_joins, 1096 transforms.eliminate_qualify, 1097 transforms.unnest_generate_date_array_using_recursive_cte, 1098 ] 1099 ), 1100 exp.Stddev: rename_func("STDEV"), 1101 exp.StrPosition: lambda self, e: strposition_sql( 1102 self, e, func_name="CHARINDEX", supports_position=True 1103 ), 1104 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 1105 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 1106 exp.SHA1Digest: lambda self, e: self.func( 1107 "HASHBYTES", exp.Literal.string("SHA1"), e.this 1108 ), 1109 exp.SHA2: lambda self, e: self.func( 1110 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 1111 ), 1112 exp.TemporaryProperty: lambda self, e: "", 1113 exp.TimeStrToTime: _timestrtotime_sql, 1114 exp.TimeToStr: _format_sql, 1115 exp.Trim: trim_sql, 1116 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 1117 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 1118 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 1119 exp.Trunc: lambda self, e: self.func( 1120 "ROUND", 1121 e.this, 1122 e.args.get("decimals") or exp.Literal.number(0), 1123 exp.Literal.number(1), 1124 ), 1125 exp.Uuid: lambda *_: "NEWID()", 1126 exp.DateFromParts: rename_func("DATEFROMPARTS"), 1127 } 1128 1129 TRANSFORMS.pop(exp.ReturnsProperty) 1130 1131 PROPERTIES_LOCATION = { 1132 **generator.Generator.PROPERTIES_LOCATION, 1133 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 1134 } 1135 1136 def scope_resolution(self, rhs: str, scope_name: str) -> str: 1137 return f"{scope_name}::{rhs}" 1138 1139 def select_sql(self, expression: exp.Select) -> str: 1140 limit = expression.args.get("limit") 1141 offset = expression.args.get("offset") 1142 1143 if isinstance(limit, exp.Fetch) and not offset: 1144 # Dialects like Oracle can FETCH directly from a row set but 1145 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1146 offset = exp.Offset(expression=exp.Literal.number(0)) 1147 expression.set("offset", offset) 1148 1149 if offset: 1150 if not expression.args.get("order"): 1151 # ORDER BY is required in order to use OFFSET in a query, so we use 1152 # a noop order by, since we don't really care about the order. 1153 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1154 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1155 1156 if isinstance(limit, exp.Limit): 1157 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1158 # we replace here because otherwise TOP would be generated in select_sql 1159 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1160 1161 return super().select_sql(expression) 1162 1163 def convert_sql(self, expression: exp.Convert) -> str: 1164 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1165 return self.func( 1166 name, expression.this, expression.expression, expression.args.get("style") 1167 ) 1168 1169 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1170 option = self.sql(expression, "this") 1171 value = self.sql(expression, "expression") 1172 if value: 1173 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1174 return f"{option} {optional_equal_sign}{value}" 1175 return option 1176 1177 def lateral_op(self, expression: exp.Lateral) -> str: 1178 cross_apply = expression.args.get("cross_apply") 1179 if cross_apply is True: 1180 return "CROSS APPLY" 1181 if cross_apply is False: 1182 return "OUTER APPLY" 1183 1184 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1185 self.unsupported("LATERAL clause is not supported.") 1186 return "LATERAL" 1187 1188 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1189 this = expression.this 1190 split_count = len(this.name.split(".")) 1191 delimiter = expression.args.get("delimiter") 1192 part_index = expression.args.get("part_index") 1193 1194 if ( 1195 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1196 or (delimiter and delimiter.name != ".") 1197 or not part_index 1198 or split_count > 4 1199 ): 1200 self.unsupported( 1201 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1202 ) 1203 return "" 1204 1205 return self.func( 1206 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1207 ) 1208 1209 def extract_sql(self, expression: exp.Extract) -> str: 1210 part = expression.this 1211 name = DATE_PART_UNMAPPING.get(part.name.upper()) or part 1212 1213 return self.func("DATEPART", name, expression.expression) 1214 1215 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1216 nano = expression.args.get("nano") 1217 if nano is not None: 1218 nano.pop() 1219 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1220 1221 if expression.args.get("fractions") is None: 1222 expression.set("fractions", exp.Literal.number(0)) 1223 if expression.args.get("precision") is None: 1224 expression.set("precision", exp.Literal.number(0)) 1225 1226 return rename_func("TIMEFROMPARTS")(self, expression) 1227 1228 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1229 zone = expression.args.get("zone") 1230 if zone is not None: 1231 zone.pop() 1232 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1233 1234 nano = expression.args.get("nano") 1235 if nano is not None: 1236 nano.pop() 1237 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1238 1239 if expression.args.get("milli") is None: 1240 expression.set("milli", exp.Literal.number(0)) 1241 1242 return rename_func("DATETIMEFROMPARTS")(self, expression) 1243 1244 def setitem_sql(self, expression: exp.SetItem) -> str: 1245 this = expression.this 1246 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1247 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1248 return f"{self.sql(this.left)} {self.sql(this.right)}" 1249 1250 return super().setitem_sql(expression) 1251 1252 def boolean_sql(self, expression: exp.Boolean) -> str: 1253 if type(expression.parent) in BIT_TYPES or isinstance( 1254 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1255 ): 1256 return "1" if expression.this else "0" 1257 1258 return "(1 = 1)" if expression.this else "(1 = 0)" 1259 1260 def is_sql(self, expression: exp.Is) -> str: 1261 if isinstance(expression.expression, exp.Boolean): 1262 return self.binary(expression, "=") 1263 return self.binary(expression, "IS") 1264 1265 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1266 sql = self.sql(expression, "this") 1267 properties = expression.args.get("properties") 1268 1269 if sql[:1] != "#" and any( 1270 isinstance(prop, exp.TemporaryProperty) 1271 for prop in (properties.expressions if properties else []) 1272 ): 1273 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1274 1275 return sql 1276 1277 def create_sql(self, expression: exp.Create) -> str: 1278 kind = expression.kind 1279 exists = expression.args.get("exists") 1280 expression.set("exists", None) 1281 1282 like_property = expression.find(exp.LikeProperty) 1283 if like_property: 1284 ctas_expression = like_property.this 1285 else: 1286 ctas_expression = expression.expression 1287 1288 if kind == "VIEW": 1289 expression.this.set("catalog", None) 1290 with_ = expression.args.get("with_") 1291 if ctas_expression and with_: 1292 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1293 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1294 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1295 ctas_expression.set("with_", with_.pop()) 1296 1297 table = expression.find(exp.Table) 1298 1299 # Convert CTAS statement to SELECT .. INTO .. 1300 if kind == "TABLE" and ctas_expression: 1301 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1302 ctas_expression = ctas_expression.subquery() 1303 1304 properties = expression.args.get("properties") or exp.Properties() 1305 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1306 1307 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1308 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1309 1310 if like_property: 1311 select_into.limit(0, copy=False) 1312 1313 sql = self.sql(select_into) 1314 else: 1315 sql = super().create_sql(expression) 1316 1317 if exists: 1318 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1319 sql_with_ctes = self.prepend_ctes(expression, sql) 1320 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1321 if kind == "SCHEMA": 1322 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1323 elif kind == "TABLE": 1324 assert table 1325 where = exp.and_( 1326 exp.column("TABLE_NAME").eq(table.name), 1327 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1328 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1329 ) 1330 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1331 elif kind == "INDEX": 1332 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1333 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1334 elif expression.args.get("replace"): 1335 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1336 1337 return self.prepend_ctes(expression, sql) 1338 1339 @generator.unsupported_args("unlogged", "expressions") 1340 def into_sql(self, expression: exp.Into) -> str: 1341 if expression.args.get("temporary"): 1342 # If the Into expression has a temporary property, push this down to the Identifier 1343 table = expression.find(exp.Table) 1344 if table and isinstance(table.this, exp.Identifier): 1345 table.this.set("temporary", True) 1346 1347 return f"{self.seg('INTO')} {self.sql(expression, 'this')}" 1348 1349 def count_sql(self, expression: exp.Count) -> str: 1350 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1351 return rename_func(func_name)(self, expression) 1352 1353 def datediff_sql(self, expression: exp.DateDiff) -> str: 1354 func_name = "DATEDIFF_BIG" if expression.args.get("big_int") else "DATEDIFF" 1355 return date_delta_sql(func_name)(self, expression) 1356 1357 def offset_sql(self, expression: exp.Offset) -> str: 1358 return f"{super().offset_sql(expression)} ROWS" 1359 1360 def version_sql(self, expression: exp.Version) -> str: 1361 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1362 this = f"FOR {name}" 1363 expr = expression.expression 1364 kind = expression.text("kind") 1365 if kind in ("FROM", "BETWEEN"): 1366 args = expr.expressions 1367 sep = "TO" if kind == "FROM" else "AND" 1368 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1369 else: 1370 expr_sql = self.sql(expr) 1371 1372 expr_sql = f" {expr_sql}" if expr_sql else "" 1373 return f"{this} {kind}{expr_sql}" 1374 1375 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1376 table = expression.args.get("table") 1377 table = f"{table} " if table else "" 1378 return f"RETURNS {table}{self.sql(expression, 'this')}" 1379 1380 def returning_sql(self, expression: exp.Returning) -> str: 1381 into = self.sql(expression, "into") 1382 into = self.seg(f"INTO {into}") if into else "" 1383 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1384 1385 def transaction_sql(self, expression: exp.Transaction) -> str: 1386 this = self.sql(expression, "this") 1387 this = f" {this}" if this else "" 1388 mark = self.sql(expression, "mark") 1389 mark = f" WITH MARK {mark}" if mark else "" 1390 return f"BEGIN TRANSACTION{this}{mark}" 1391 1392 def commit_sql(self, expression: exp.Commit) -> str: 1393 this = self.sql(expression, "this") 1394 this = f" {this}" if this else "" 1395 durability = expression.args.get("durability") 1396 durability = ( 1397 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1398 if durability is not None 1399 else "" 1400 ) 1401 return f"COMMIT TRANSACTION{this}{durability}" 1402 1403 def rollback_sql(self, expression: exp.Rollback) -> str: 1404 this = self.sql(expression, "this") 1405 this = f" {this}" if this else "" 1406 return f"ROLLBACK TRANSACTION{this}" 1407 1408 def identifier_sql(self, expression: exp.Identifier) -> str: 1409 identifier = super().identifier_sql(expression) 1410 1411 if expression.args.get("global_"): 1412 identifier = f"##{identifier}" 1413 elif expression.args.get("temporary"): 1414 identifier = f"#{identifier}" 1415 1416 return identifier 1417 1418 def constraint_sql(self, expression: exp.Constraint) -> str: 1419 this = self.sql(expression, "this") 1420 expressions = self.expressions(expression, flat=True, sep=" ") 1421 return f"CONSTRAINT {this} {expressions}" 1422 1423 def length_sql(self, expression: exp.Length) -> str: 1424 return self._uncast_text(expression, "LEN") 1425 1426 def right_sql(self, expression: exp.Right) -> str: 1427 return self._uncast_text(expression, "RIGHT") 1428 1429 def left_sql(self, expression: exp.Left) -> str: 1430 return self._uncast_text(expression, "LEFT") 1431 1432 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1433 this = expression.this 1434 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1435 this_sql = self.sql(this, "this") 1436 else: 1437 this_sql = self.sql(this) 1438 expression_sql = self.sql(expression, "expression") 1439 return self.func(name, this_sql, expression_sql if expression_sql else None) 1440 1441 def partition_sql(self, expression: exp.Partition) -> str: 1442 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1443 1444 def alter_sql(self, expression: exp.Alter) -> str: 1445 action = seq_get(expression.args.get("actions") or [], 0) 1446 if isinstance(action, exp.AlterRename): 1447 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1448 return super().alter_sql(expression) 1449 1450 def drop_sql(self, expression: exp.Drop) -> str: 1451 if expression.args["kind"] == "VIEW": 1452 expression.this.set("catalog", None) 1453 return super().drop_sql(expression) 1454 1455 def options_modifier(self, expression: exp.Expression) -> str: 1456 options = self.expressions(expression, key="options") 1457 return f" OPTION{self.wrap(options)}" if options else "" 1458 1459 def dpipe_sql(self, expression: exp.DPipe) -> str: 1460 return self.sql( 1461 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1462 ) 1463 1464 def isascii_sql(self, expression: exp.IsAscii) -> str: 1465 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1466 1467 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1468 this = super().columndef_sql(expression, sep) 1469 default = self.sql(expression, "default") 1470 default = f" = {default}" if default else "" 1471 output = self.sql(expression, "output") 1472 output = f" {output}" if output else "" 1473 return f"{this}{default}{output}" 1474 1475 def coalesce_sql(self, expression: exp.Coalesce) -> str: 1476 func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE" 1477 return rename_func(func_name)(self, expression) 1478 1479 def storedprocedure_sql(self, expression: exp.StoredProcedure) -> str: 1480 this = self.sql(expression, "this") 1481 expressions = self.expressions(expression) 1482 expressions = ( 1483 self.wrap(expressions) if expression.args.get("wrapped") else f" {expressions}" 1484 ) 1485 return f"{this}{expressions}" if expressions.strip() != "" else this 1486 1487 def ifblock_sql(self, expression: exp.IfBlock) -> str: 1488 this = self.sql(expression, "this") 1489 true = self.sql(expression, "true") 1490 true = f" {true}" if true else " " 1491 false = self.sql(expression, "false") 1492 false = f"; ELSE BEGIN {false}" if false else "" 1493 return f"IF {this} BEGIN{true}{false}" 1494 1495 def whileblock_sql(self, expression: exp.WhileBlock) -> str: 1496 this = self.sql(expression, "this") 1497 body = self.sql(expression, "body") 1498 body = f" {body}" if body else " " 1499 return f"WHILE {this} BEGIN{body}" 1500 1501 def execute_sql(self, expression: exp.Execute) -> str: 1502 this = self.sql(expression, "this") 1503 expressions = self.expressions(expression) 1504 expressions = f" {expressions}" if expressions else "" 1505 return f"EXECUTE {this}{expressions}" 1506 1507 def executesql_sql(self, expression: exp.ExecuteSql) -> str: 1508 return self.execute_sql(expression)
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True: Always quote except for specials cases. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the
indentation of subqueries and filters under a
WHEREclause. Default: 2. - normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
1139 def select_sql(self, expression: exp.Select) -> str: 1140 limit = expression.args.get("limit") 1141 offset = expression.args.get("offset") 1142 1143 if isinstance(limit, exp.Fetch) and not offset: 1144 # Dialects like Oracle can FETCH directly from a row set but 1145 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1146 offset = exp.Offset(expression=exp.Literal.number(0)) 1147 expression.set("offset", offset) 1148 1149 if offset: 1150 if not expression.args.get("order"): 1151 # ORDER BY is required in order to use OFFSET in a query, so we use 1152 # a noop order by, since we don't really care about the order. 1153 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1154 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1155 1156 if isinstance(limit, exp.Limit): 1157 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1158 # we replace here because otherwise TOP would be generated in select_sql 1159 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1160 1161 return super().select_sql(expression)
1169 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1170 option = self.sql(expression, "this") 1171 value = self.sql(expression, "expression") 1172 if value: 1173 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1174 return f"{option} {optional_equal_sign}{value}" 1175 return option
1177 def lateral_op(self, expression: exp.Lateral) -> str: 1178 cross_apply = expression.args.get("cross_apply") 1179 if cross_apply is True: 1180 return "CROSS APPLY" 1181 if cross_apply is False: 1182 return "OUTER APPLY" 1183 1184 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1185 self.unsupported("LATERAL clause is not supported.") 1186 return "LATERAL"
1188 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1189 this = expression.this 1190 split_count = len(this.name.split(".")) 1191 delimiter = expression.args.get("delimiter") 1192 part_index = expression.args.get("part_index") 1193 1194 if ( 1195 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1196 or (delimiter and delimiter.name != ".") 1197 or not part_index 1198 or split_count > 4 1199 ): 1200 self.unsupported( 1201 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1202 ) 1203 return "" 1204 1205 return self.func( 1206 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1207 )
1215 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1216 nano = expression.args.get("nano") 1217 if nano is not None: 1218 nano.pop() 1219 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1220 1221 if expression.args.get("fractions") is None: 1222 expression.set("fractions", exp.Literal.number(0)) 1223 if expression.args.get("precision") is None: 1224 expression.set("precision", exp.Literal.number(0)) 1225 1226 return rename_func("TIMEFROMPARTS")(self, expression)
1228 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1229 zone = expression.args.get("zone") 1230 if zone is not None: 1231 zone.pop() 1232 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1233 1234 nano = expression.args.get("nano") 1235 if nano is not None: 1236 nano.pop() 1237 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1238 1239 if expression.args.get("milli") is None: 1240 expression.set("milli", exp.Literal.number(0)) 1241 1242 return rename_func("DATETIMEFROMPARTS")(self, expression)
1244 def setitem_sql(self, expression: exp.SetItem) -> str: 1245 this = expression.this 1246 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1247 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1248 return f"{self.sql(this.left)} {self.sql(this.right)}" 1249 1250 return super().setitem_sql(expression)
1265 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1266 sql = self.sql(expression, "this") 1267 properties = expression.args.get("properties") 1268 1269 if sql[:1] != "#" and any( 1270 isinstance(prop, exp.TemporaryProperty) 1271 for prop in (properties.expressions if properties else []) 1272 ): 1273 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1274 1275 return sql
1277 def create_sql(self, expression: exp.Create) -> str: 1278 kind = expression.kind 1279 exists = expression.args.get("exists") 1280 expression.set("exists", None) 1281 1282 like_property = expression.find(exp.LikeProperty) 1283 if like_property: 1284 ctas_expression = like_property.this 1285 else: 1286 ctas_expression = expression.expression 1287 1288 if kind == "VIEW": 1289 expression.this.set("catalog", None) 1290 with_ = expression.args.get("with_") 1291 if ctas_expression and with_: 1292 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1293 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1294 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1295 ctas_expression.set("with_", with_.pop()) 1296 1297 table = expression.find(exp.Table) 1298 1299 # Convert CTAS statement to SELECT .. INTO .. 1300 if kind == "TABLE" and ctas_expression: 1301 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1302 ctas_expression = ctas_expression.subquery() 1303 1304 properties = expression.args.get("properties") or exp.Properties() 1305 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1306 1307 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1308 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1309 1310 if like_property: 1311 select_into.limit(0, copy=False) 1312 1313 sql = self.sql(select_into) 1314 else: 1315 sql = super().create_sql(expression) 1316 1317 if exists: 1318 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1319 sql_with_ctes = self.prepend_ctes(expression, sql) 1320 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1321 if kind == "SCHEMA": 1322 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1323 elif kind == "TABLE": 1324 assert table 1325 where = exp.and_( 1326 exp.column("TABLE_NAME").eq(table.name), 1327 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1328 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1329 ) 1330 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1331 elif kind == "INDEX": 1332 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1333 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1334 elif expression.args.get("replace"): 1335 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1336 1337 return self.prepend_ctes(expression, sql)
1339 @generator.unsupported_args("unlogged", "expressions") 1340 def into_sql(self, expression: exp.Into) -> str: 1341 if expression.args.get("temporary"): 1342 # If the Into expression has a temporary property, push this down to the Identifier 1343 table = expression.find(exp.Table) 1344 if table and isinstance(table.this, exp.Identifier): 1345 table.this.set("temporary", True) 1346 1347 return f"{self.seg('INTO')} {self.sql(expression, 'this')}"
1360 def version_sql(self, expression: exp.Version) -> str: 1361 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1362 this = f"FOR {name}" 1363 expr = expression.expression 1364 kind = expression.text("kind") 1365 if kind in ("FROM", "BETWEEN"): 1366 args = expr.expressions 1367 sep = "TO" if kind == "FROM" else "AND" 1368 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1369 else: 1370 expr_sql = self.sql(expr) 1371 1372 expr_sql = f" {expr_sql}" if expr_sql else "" 1373 return f"{this} {kind}{expr_sql}"
1392 def commit_sql(self, expression: exp.Commit) -> str: 1393 this = self.sql(expression, "this") 1394 this = f" {this}" if this else "" 1395 durability = expression.args.get("durability") 1396 durability = ( 1397 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1398 if durability is not None 1399 else "" 1400 ) 1401 return f"COMMIT TRANSACTION{this}{durability}"
1408 def identifier_sql(self, expression: exp.Identifier) -> str: 1409 identifier = super().identifier_sql(expression) 1410 1411 if expression.args.get("global_"): 1412 identifier = f"##{identifier}" 1413 elif expression.args.get("temporary"): 1414 identifier = f"#{identifier}" 1415 1416 return identifier
1467 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1468 this = super().columndef_sql(expression, sep) 1469 default = self.sql(expression, "default") 1470 default = f" = {default}" if default else "" 1471 output = self.sql(expression, "output") 1472 output = f" {output}" if output else "" 1473 return f"{this}{default}{output}"
1479 def storedprocedure_sql(self, expression: exp.StoredProcedure) -> str: 1480 this = self.sql(expression, "this") 1481 expressions = self.expressions(expression) 1482 expressions = ( 1483 self.wrap(expressions) if expression.args.get("wrapped") else f" {expressions}" 1484 ) 1485 return f"{this}{expressions}" if expressions.strip() != "" else this
1487 def ifblock_sql(self, expression: exp.IfBlock) -> str: 1488 this = self.sql(expression, "this") 1489 true = self.sql(expression, "true") 1490 true = f" {true}" if true else " " 1491 false = self.sql(expression, "false") 1492 false = f"; ELSE BEGIN {false}" if false else "" 1493 return f"IF {this} BEGIN{true}{false}"
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- INOUT_SEPARATOR
- JOIN_HINTS
- DIRECTED_JOINS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- SUPPORTS_WINDOW_EXCLUDE
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- UNICODE_SUBSTITUTE
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- SUPPORTS_MEDIAN
- SUPPORTS_UNIX_SECONDS
- NORMALIZE_EXTRACT_DATE_PARTS
- ARRAY_SIZE_NAME
- ARRAY_SIZE_DIM_REQUIRED
- SUPPORTS_BETWEEN_FLAGS
- SUPPORTS_LIKE_QUANTIFIERS
- MATCH_AGAINST_TABLE_PREFIX
- SET_ASSIGNMENT_REQUIRES_VARIABLE_KEYWORD
- DECLARE_DEFAULT_ASSIGNMENT
- UPDATE_STATEMENT_SUPPORTS_FROM
- STAR_EXCLUDE_REQUIRES_DERIVED_TABLE
- UNSUPPORTED_TYPES
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS
- SAFE_JSON_PATH_KEY_RE
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- sanitize_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- pseudocolumn_sql
- columnposition_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- inoutcolumnconstraint_sql
- sequenceproperties_sql
- triggerproperties_sql
- triggerreferencing_sql
- triggerevent_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- limitoptions_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablefromrows_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- from_sql
- groupingsets_sql
- rollup_sql
- rollupindex_sql
- rollupproperty_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- queryband_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- booland_sql
- boolor_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- for_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- formatphrase_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- strtotime_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterindex_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- altersession_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- addpartition_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- safedivide_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- like_sql
- ilike_sql
- match_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- sub_sql
- trycast_sql
- jsoncast_sql
- try_sql
- log_sql
- use_sql
- binary
- ceil_floor
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- whens_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- uniquekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- generateembedding_sql
- mltranslate_sql
- mlforecast_sql
- featuresattime_sql
- vectorsearch_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodatetime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonextractquote_sql
- jsonexists_sql
- arrayagg_sql
- slice_sql
- apply_sql
- grant_sql
- revoke_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql
- string_sql
- median_sql
- overflowtruncatebehavior_sql
- unixseconds_sql
- arraysize_sql
- attach_sql
- detach_sql
- attachoption_sql
- watermarkcolumnconstraint_sql
- encodeproperty_sql
- includeproperty_sql
- xmlelement_sql
- xmlkeyvalueoption_sql
- partitionbyrangeproperty_sql
- partitionbyrangepropertydynamic_sql
- unpivotcolumns_sql
- analyzesample_sql
- analyzestatistics_sql
- analyzehistogram_sql
- analyzedelete_sql
- analyzelistchainedrows_sql
- analyzevalidate_sql
- analyze_sql
- xmltable_sql
- xmlnamespace_sql
- export_sql
- declare_sql
- declareitem_sql
- recursivewithsearch_sql
- parameterizedagg_sql
- anonymousaggfunc_sql
- combinedaggfunc_sql
- combinedparameterizedagg_sql
- show_sql
- install_sql
- get_put_sql
- translatecharacters_sql
- decodecase_sql
- semanticview_sql
- getextract_sql
- datefromunixdate_sql
- space_sql
- buildproperty_sql
- refreshtriggerproperty_sql
- modelattribute_sql
- directorystage_sql
- uuid_sql
- initcap_sql
- localtime_sql
- localtimestamp_sql
- weekstart_sql
- chr_sql
- block_sql