sqlglot.dialects.tsql
from __future__ import annotations

import datetime
import re
import typing as t
from functools import reduce

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    NormalizationStrategy,
    any_value_to_max_sql,
    build_date_delta,
    date_delta_sql,
    datestrtodate_sql,
    generatedasidentitycolumnconstraint_sql,
    max_or_greatest,
    min_or_least,
    rename_func,
    strposition_sql,
    timestrtotime_sql,
    trim_sql,
    map_date_part,
)
from sqlglot.helper import seq_get
from sqlglot.parser import build_coalesce
from sqlglot.time import format_time
from sqlglot.tokens import TokenType
from sqlglot.typing.tsql import EXPRESSION_METADATA

if t.TYPE_CHECKING:
    from sqlglot._typing import E

# T-SQL date-part spellings (as accepted by DATENAME) that should render as
# "full name" strftime directives, e.g. DATENAME(weekday, ...) -> %A.
FULL_FORMAT_TIME_MAPPING = {
    "weekday": "%A",
    "dw": "%A",
    "w": "%A",
    "month": "%B",
    "mm": "%B",
    "m": "%B",
}

# Canonicalizes the many DATEADD/DATEDIFF part abbreviations into one unit
# name per date part (e.g. "yy"/"yyyy" -> "year").
DATE_DELTA_INTERVAL = {
    "year": "year",
    "yyyy": "year",
    "yy": "year",
    "quarter": "quarter",
    "qq": "quarter",
    "q": "quarter",
    "month": "month",
    "mm": "month",
    "m": "month",
    "week": "week",
    "ww": "week",
    "wk": "week",
    "day": "day",
    "dd": "day",
    "d": "day",
}

# Maps canonical date-part names back to the T-SQL-specific spelling used on
# generation (inverse direction of DATE_PART_MAPPING below).
DATE_PART_UNMAPPING = {
    "WEEKISO": "ISO_WEEK",
    "DAYOFWEEK": "WEEKDAY",
    "TIMEZONE_MINUTE": "TZOFFSET",
}

# Detects date/time tokens (day/month/year/hour/second runs) inside a FORMAT()
# format string, used to decide between NumberToStr and TimeToStr.
DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})")

# N = Numeric, C=Currency
TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"}

# T-SQL's epoch: integer values used where a DATETIME is expected count days
# from this date.
DEFAULT_START_DATE = datetime.date(1900, 1, 1)

BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias}

# Unsupported options:
# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] )
# - TABLE HINT
OPTIONS: parser.OPTIONS_TYPE = {
    **dict.fromkeys(
        (
            "DISABLE_OPTIMIZED_PLAN_FORCING",
            "FAST",
            "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX",
            "LABEL",
            "MAXDOP",
            "MAXRECURSION",
            "MAX_GRANT_PERCENT",
            "MIN_GRANT_PERCENT",
            "NO_PERFORMANCE_SPOOL",
            "QUERYTRACEON",
            "RECOMPILE",
        ),
        tuple(),
    ),
    "CONCAT": ("UNION",),
    "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"),
    "EXPAND": ("VIEWS",),
    "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"),
    "HASH": ("GROUP", "JOIN", "UNION"),
    "KEEP": ("PLAN",),
    "KEEPFIXED": ("PLAN",),
    "LOOP": ("JOIN",),
    "MERGE": ("JOIN", "UNION"),
    "OPTIMIZE": (("FOR", "UNKNOWN"),),
    "ORDER": ("GROUP",),
    "PARAMETERIZATION": ("FORCED", "SIMPLE"),
    "ROBUST": ("PLAN",),
    "USE": ("PLAN",),
}


# Options accepted after FOR XML (see _parse_for in the Parser below).
XML_OPTIONS: parser.OPTIONS_TYPE = {
    **dict.fromkeys(
        (
            "AUTO",
            "EXPLICIT",
            "TYPE",
        ),
        tuple(),
    ),
    "ELEMENTS": (
        "XSINIL",
        "ABSENT",
    ),
    "BINARY": ("BASE64",),
}


OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL")


def _build_formatted_time(
    exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None
) -> t.Callable[[t.List], E]:
    """Return a builder for T-SQL functions of the shape F(format, value).

    The format argument (arg 0) is translated from T-SQL part names into
    strftime directives; the value argument (arg 1) is cast to DATETIME2.
    When full_format_mapping is truthy, FULL_FORMAT_TIME_MAPPING overrides
    take precedence (used for DATENAME, which yields full names).
    """

    def _builder(args: t.List) -> E:
        fmt = seq_get(args, 0)
        if isinstance(fmt, exp.Expression):
            fmt = exp.Literal.string(
                format_time(
                    fmt.name.lower(),
                    (
                        {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING}
                        if full_format_mapping
                        else TSQL.TIME_MAPPING
                    ),
                )
            )

        this = seq_get(args, 1)
        if isinstance(this, exp.Expression):
            this = exp.cast(this, exp.DataType.Type.DATETIME2)

        return exp_class(this=this, format=fmt)

    return _builder


def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr:
    """Build FORMAT(value, format[, culture]) into NumberToStr or TimeToStr.

    The format is treated as numeric when it is a known number format ("N",
    "C") or contains no date/time tokens; otherwise it is converted to a
    strftime string (single-character formats use FORMAT_TIME_MAPPING).
    """
    this = seq_get(args, 0)
    fmt = seq_get(args, 1)
    culture = seq_get(args, 2)

    number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name))

    if number_fmt:
        return exp.NumberToStr(this=this, format=fmt, culture=culture)

    if fmt:
        fmt = exp.Literal.string(
            format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING)
            if len(fmt.name) == 1
            else format_time(fmt.name, TSQL.TIME_MAPPING)
        )

    return exp.TimeToStr(this=this, format=fmt, culture=culture)
def _build_eomonth(args: t.List) -> exp.LastDay:
    """Build EOMONTH(start_date[, months_to_add]) into a LastDay expression."""
    start = exp.TsOrDsToDate(this=seq_get(args, 0))
    lag = seq_get(args, 1)

    if lag is None:
        target: exp.Expression = start
    else:
        # Shift the anchor date by the requested number of months first.
        unit = DATE_DELTA_INTERVAL.get("month")
        target = exp.DateAdd(this=start, expression=lag, unit=unit and exp.var(unit))

    return exp.LastDay(this=target)


def _build_hashbytes(args: t.List) -> exp.Expression:
    """Map HASHBYTES('<algorithm>', data) onto sqlglot's hash expressions."""
    kind, data = args
    algo = kind.name.upper() if kind.is_string else ""

    if algo == "MD5":
        args.pop(0)
        return exp.MD5(this=data)

    if algo in ("SHA", "SHA1"):
        args.pop(0)
        return exp.SHA(this=data)

    if algo == "SHA2_256":
        return exp.SHA2(this=data, length=exp.Literal.number(256))

    if algo == "SHA2_512":
        return exp.SHA2(this=data, length=exp.Literal.number(512))

    # Unrecognized algorithm: keep the original call untouched.
    return exp.func("HASHBYTES", *args)


def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str:
    """Render NumberToStr/TimeToStr back into a T-SQL FORMAT() call."""
    fmt = expression.args["format"]

    if isinstance(expression, exp.NumberToStr):
        # Numeric formats pass through unchanged.
        fmt_sql = self.sql(fmt)
    elif fmt.is_string:
        # Translate the strftime format back into T-SQL's format tokens.
        mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING)
        fmt_sql = self.sql(exp.Literal.string(mapped_fmt))
    else:
        fmt_sql = self.format_time(expression) or self.sql(fmt)

    return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture"))


def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str:
    """Render GroupConcat as STRING_AGG(...)[ WITHIN GROUP (ORDER BY ...)]."""
    target = expression.this

    distinct = expression.find(exp.Distinct)
    if distinct:
        # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression
        self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.")
        target = distinct.pop().expressions[0]

    within_group = ""
    if isinstance(expression.this, exp.Order):
        if expression.this.this:
            target = expression.this.this.pop()
        # The generated ORDER BY carries a leading space; strip it inside the parens.
        within_group = f" WITHIN GROUP ({self.sql(expression.this)[1:]})"

    separator = expression.args.get("separator") or exp.Literal.string(",")
    return f"STRING_AGG({self.format_args(target, separator)}){within_group}"


def _build_date_delta(
    exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None, big_int: bool = False
) -> t.Callable[[t.List], E]:
    """Return a builder for DATEDIFF/DATEDIFF_BIG(unit, start, end)."""

    def _builder(args: t.List) -> E:
        part = seq_get(args, 0)
        if part and unit_mapping:
            part = exp.var(unit_mapping.get(part.name.lower(), part.name))

        start = seq_get(args, 1)
        if start and start.is_number:
            # Numeric types are valid DATETIME values
            if start.is_int:
                # Integers count days from the T-SQL epoch (1900-01-01).
                resolved = DEFAULT_START_DATE + datetime.timedelta(days=start.to_py())
                start = exp.Literal.string(resolved.strftime("%F"))
            else:
                # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs.
                # This is not a problem when generating T-SQL code, it is when transpiling to other dialects.
                return exp_class(
                    this=seq_get(args, 2), expression=start, unit=part, big_int=big_int
                )

        return exp_class(
            this=exp.TimeStrToTime(this=seq_get(args, 2)),
            expression=exp.TimeStrToTime(this=start),
            unit=part,
            big_int=big_int,
        )

    return _builder
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        # (the generator above is consumed only now, after qualify_outputs has
        # wrapped the projections in Alias nodes).
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression


# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts:
    """Map DATETIMEFROMPARTS positional arguments onto TimestampFromParts."""
    return exp.TimestampFromParts(
        year=seq_get(args, 0),
        month=seq_get(args, 1),
        day=seq_get(args, 2),
        hour=seq_get(args, 3),
        min=seq_get(args, 4),
        sec=seq_get(args, 5),
        milli=seq_get(args, 6),
    )


# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_timefromparts(args: t.List) -> exp.TimeFromParts:
    """Map TIMEFROMPARTS positional arguments onto TimeFromParts."""
    return exp.TimeFromParts(
        hour=seq_get(args, 0),
        min=seq_get(args, 1),
        sec=seq_get(args, 2),
        fractions=seq_get(args, 3),
        precision=seq_get(args, 4),
    )


def _build_with_arg_as_text(
    klass: t.Type[exp.Expression],
) -> t.Callable[[t.List[exp.Expression]], exp.Expression]:
    """Return a builder that casts a non-string first argument to TEXT.

    Used for LEN/LEFT/RIGHT, whose argument is treated as character data.
    """

    def _parse(args: t.List[exp.Expression]) -> exp.Expression:
        this = seq_get(args, 0)

        if this and not this.is_string:
            this = exp.cast(this, exp.DataType.Type.TEXT)

        expression = seq_get(args, 1)
        kwargs = {"this": this}

        if expression:
            kwargs["expression"] = expression

        return klass(**kwargs)

    return _parse


# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16
def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous:
    # PARSENAME(...) will be stored into exp.SplitPart if:
    # - All args are literals
    # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL)
    if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args):
        this = args[0]
        part_index = args[1]
        split_count = len(this.name.split("."))
        if split_count <= 4:
            # PARSENAME counts parts right-to-left; SplitPart counts
            # left-to-right, hence the index inversion.
            return exp.SplitPart(
                this=this,
                delimiter=exp.Literal.string("."),
                part_index=exp.Literal.number(split_count + 1 - part_index.to_py()),
            )

    return exp.Anonymous(this="PARSENAME", expressions=args)


def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract:
    """Build JSON_QUERY(expr[, path]) into a JSONExtract expression."""
    if len(args) == 1:
        # The default value for path is '$'. As a result, if you don't provide a
        # value for path, JSON_QUERY returns the input expression.
        args.append(exp.Literal.string("$"))

    return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect)
374 args.append(exp.Literal.string("$")) 375 376 return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect) 377 378 379def _json_extract_sql( 380 self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar 381) -> str: 382 json_query = self.func("JSON_QUERY", expression.this, expression.expression) 383 json_value = self.func("JSON_VALUE", expression.this, expression.expression) 384 return self.func("ISNULL", json_query, json_value) 385 386 387def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime): 388 sql = timestrtotime_sql(self, expression) 389 if expression.args.get("zone"): 390 # If there is a timezone, produce an expression like: 391 # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC' 392 # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information 393 return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC"))) 394 return sql 395 396 397def _build_datetrunc(args: t.List) -> exp.TimestampTrunc: 398 unit = seq_get(args, 0) 399 this = seq_get(args, 1) 400 401 if this and this.is_string: 402 this = exp.cast(this, exp.DataType.Type.DATETIME2) 403 404 return exp.TimestampTrunc(this=this, unit=unit) 405 406 407class TSQL(Dialect): 408 SUPPORTS_SEMI_ANTI_JOIN = False 409 LOG_BASE_FIRST = False 410 TYPED_DIVISION = True 411 CONCAT_COALESCE = True 412 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 413 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 414 415 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 416 417 EXPRESSION_METADATA = EXPRESSION_METADATA.copy() 418 419 DATE_PART_MAPPING = { 420 **Dialect.DATE_PART_MAPPING, 421 "QQ": "QUARTER", 422 "M": "MONTH", 423 "Y": "DAYOFYEAR", 424 "WW": "WEEK", 425 "N": "MINUTE", 426 "SS": "SECOND", 427 "MCS": "MICROSECOND", 428 "TZOFFSET": "TIMEZONE_MINUTE", 429 "TZ": "TIMEZONE_MINUTE", 430 "ISO_WEEK": "WEEKISO", 431 "ISOWK": "WEEKISO", 432 "ISOWW": "WEEKISO", 433 } 
434 435 TIME_MAPPING = { 436 "year": "%Y", 437 "dayofyear": "%j", 438 "day": "%d", 439 "dy": "%d", 440 "y": "%Y", 441 "week": "%W", 442 "ww": "%W", 443 "wk": "%W", 444 "isowk": "%V", 445 "isoww": "%V", 446 "iso_week": "%V", 447 "hour": "%h", 448 "hh": "%I", 449 "minute": "%M", 450 "mi": "%M", 451 "n": "%M", 452 "second": "%S", 453 "ss": "%S", 454 "s": "%-S", 455 "millisecond": "%f", 456 "ms": "%f", 457 "weekday": "%w", 458 "dw": "%w", 459 "month": "%m", 460 "mm": "%M", 461 "m": "%-M", 462 "Y": "%Y", 463 "YYYY": "%Y", 464 "YY": "%y", 465 "MMMM": "%B", 466 "MMM": "%b", 467 "MM": "%m", 468 "M": "%-m", 469 "dddd": "%A", 470 "dd": "%d", 471 "d": "%-d", 472 "HH": "%H", 473 "H": "%-H", 474 "h": "%-I", 475 "ffffff": "%f", 476 "yyyy": "%Y", 477 "yy": "%y", 478 } 479 480 CONVERT_FORMAT_MAPPING = { 481 "0": "%b %d %Y %-I:%M%p", 482 "1": "%m/%d/%y", 483 "2": "%y.%m.%d", 484 "3": "%d/%m/%y", 485 "4": "%d.%m.%y", 486 "5": "%d-%m-%y", 487 "6": "%d %b %y", 488 "7": "%b %d, %y", 489 "8": "%H:%M:%S", 490 "9": "%b %d %Y %-I:%M:%S:%f%p", 491 "10": "mm-dd-yy", 492 "11": "yy/mm/dd", 493 "12": "yymmdd", 494 "13": "%d %b %Y %H:%M:ss:%f", 495 "14": "%H:%M:%S:%f", 496 "20": "%Y-%m-%d %H:%M:%S", 497 "21": "%Y-%m-%d %H:%M:%S.%f", 498 "22": "%m/%d/%y %-I:%M:%S %p", 499 "23": "%Y-%m-%d", 500 "24": "%H:%M:%S", 501 "25": "%Y-%m-%d %H:%M:%S.%f", 502 "100": "%b %d %Y %-I:%M%p", 503 "101": "%m/%d/%Y", 504 "102": "%Y.%m.%d", 505 "103": "%d/%m/%Y", 506 "104": "%d.%m.%Y", 507 "105": "%d-%m-%Y", 508 "106": "%d %b %Y", 509 "107": "%b %d, %Y", 510 "108": "%H:%M:%S", 511 "109": "%b %d %Y %-I:%M:%S:%f%p", 512 "110": "%m-%d-%Y", 513 "111": "%Y/%m/%d", 514 "112": "%Y%m%d", 515 "113": "%d %b %Y %H:%M:%S:%f", 516 "114": "%H:%M:%S:%f", 517 "120": "%Y-%m-%d %H:%M:%S", 518 "121": "%Y-%m-%d %H:%M:%S.%f", 519 "126": "%Y-%m-%dT%H:%M:%S.%f", 520 } 521 522 FORMAT_TIME_MAPPING = { 523 "y": "%B %Y", 524 "d": "%m/%d/%Y", 525 "H": "%-H", 526 "h": "%-I", 527 "s": "%Y-%m-%d %H:%M:%S", 528 "D": "%A,%B,%Y", 529 "f": "%A,%B,%Y 
%-I:%M %p", 530 "F": "%A,%B,%Y %-I:%M:%S %p", 531 "g": "%m/%d/%Y %-I:%M %p", 532 "G": "%m/%d/%Y %-I:%M:%S %p", 533 "M": "%B %-d", 534 "m": "%B %-d", 535 "O": "%Y-%m-%dT%H:%M:%S", 536 "u": "%Y-%M-%D %H:%M:%S%z", 537 "U": "%A, %B %D, %Y %H:%M:%S%z", 538 "T": "%-I:%M:%S %p", 539 "t": "%-I:%M", 540 "Y": "%a %Y", 541 } 542 543 class Tokenizer(tokens.Tokenizer): 544 IDENTIFIERS = [("[", "]"), '"'] 545 QUOTES = ["'", '"'] 546 HEX_STRINGS = [("0x", ""), ("0X", "")] 547 VAR_SINGLE_TOKENS = {"@", "$", "#"} 548 549 KEYWORDS = { 550 **tokens.Tokenizer.KEYWORDS, 551 "CLUSTERED INDEX": TokenType.INDEX, 552 "DATETIME2": TokenType.DATETIME2, 553 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 554 "DECLARE": TokenType.DECLARE, 555 "EXEC": TokenType.COMMAND, 556 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 557 "GO": TokenType.COMMAND, 558 "IMAGE": TokenType.IMAGE, 559 "MONEY": TokenType.MONEY, 560 "NONCLUSTERED INDEX": TokenType.INDEX, 561 "NTEXT": TokenType.TEXT, 562 "OPTION": TokenType.OPTION, 563 "OUTPUT": TokenType.RETURNING, 564 "PRINT": TokenType.COMMAND, 565 "PROC": TokenType.PROCEDURE, 566 "REAL": TokenType.FLOAT, 567 "ROWVERSION": TokenType.ROWVERSION, 568 "SMALLDATETIME": TokenType.SMALLDATETIME, 569 "SMALLMONEY": TokenType.SMALLMONEY, 570 "SQL_VARIANT": TokenType.VARIANT, 571 "SYSTEM_USER": TokenType.CURRENT_USER, 572 "TOP": TokenType.TOP, 573 "TIMESTAMP": TokenType.ROWVERSION, 574 "TINYINT": TokenType.UTINYINT, 575 "UNIQUEIDENTIFIER": TokenType.UUID, 576 "UPDATE STATISTICS": TokenType.COMMAND, 577 "XML": TokenType.XML, 578 } 579 KEYWORDS.pop("/*+") 580 581 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 582 583 class Parser(parser.Parser): 584 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 585 LOG_DEFAULTS_TO_LN = True 586 STRING_ALIASES = True 587 NO_PAREN_IF_COMMANDS = False 588 589 QUERY_MODIFIER_PARSERS = { 590 **parser.Parser.QUERY_MODIFIER_PARSERS, 591 TokenType.OPTION: lambda self: ("options", self._parse_options()), 592 TokenType.FOR: lambda self: ("for_", 
self._parse_for()), 593 } 594 595 # T-SQL does not allow BEGIN to be used as an identifier 596 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 597 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 598 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 599 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 600 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 601 602 FUNCTIONS = { 603 **parser.Parser.FUNCTIONS, 604 "CHARINDEX": lambda args: exp.StrPosition( 605 this=seq_get(args, 1), 606 substr=seq_get(args, 0), 607 position=seq_get(args, 2), 608 ), 609 "COUNT": lambda args: exp.Count( 610 this=seq_get(args, 0), expressions=args[1:], big_int=False 611 ), 612 "COUNT_BIG": lambda args: exp.Count( 613 this=seq_get(args, 0), expressions=args[1:], big_int=True 614 ), 615 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 616 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 617 "DATEDIFF_BIG": _build_date_delta( 618 exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL, big_int=True 619 ), 620 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 621 "DATETIMEFROMPARTS": _build_datetimefromparts, 622 "EOMONTH": _build_eomonth, 623 "FORMAT": _build_format, 624 "GETDATE": exp.CurrentTimestamp.from_arg_list, 625 "HASHBYTES": _build_hashbytes, 626 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 627 "JSON_QUERY": _build_json_query, 628 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 629 "LEN": _build_with_arg_as_text(exp.Length), 630 "LEFT": _build_with_arg_as_text(exp.Left), 631 "NEWID": exp.Uuid.from_arg_list, 632 "RIGHT": _build_with_arg_as_text(exp.Right), 633 "PARSENAME": _build_parsename, 634 "REPLICATE": exp.Repeat.from_arg_list, 635 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 636 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), 
expression=exp.Literal.number(2)), 637 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 638 "SUSER_NAME": exp.CurrentUser.from_arg_list, 639 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 640 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 641 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 642 "TIMEFROMPARTS": _build_timefromparts, 643 "DATETRUNC": _build_datetrunc, 644 } 645 646 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 647 648 PROCEDURE_OPTIONS = dict.fromkeys( 649 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 650 ) 651 652 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 653 654 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 655 TokenType.TABLE, 656 *parser.Parser.TYPE_TOKENS, 657 } 658 659 STATEMENT_PARSERS = { 660 **parser.Parser.STATEMENT_PARSERS, 661 TokenType.DECLARE: lambda self: self._parse_declare(), 662 } 663 664 RANGE_PARSERS = { 665 **parser.Parser.RANGE_PARSERS, 666 TokenType.DCOLON: lambda self, this: self.expression( 667 exp.ScopeResolution, 668 this=this, 669 expression=self._parse_function() or self._parse_var(any_token=True), 670 ), 671 } 672 673 NO_PAREN_FUNCTION_PARSERS = { 674 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 675 "NEXT": lambda self: self._parse_next_value_for(), 676 } 677 678 FUNCTION_PARSERS: t.Dict[str, t.Callable] = { 679 **parser.Parser.FUNCTION_PARSERS, 680 "JSON_ARRAYAGG": lambda self: self.expression( 681 exp.JSONArrayAgg, 682 this=self._parse_bitwise(), 683 order=self._parse_order(), 684 null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"), 685 ), 686 "DATEPART": lambda self: self._parse_datepart(), 687 } 688 689 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 690 COLUMN_OPERATORS = { 691 **parser.Parser.COLUMN_OPERATORS, 692 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 693 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 694 
else self.expression(exp.ScopeResolution, this=this, expression=to), 695 } 696 697 SET_OP_MODIFIERS = {"offset"} 698 699 ODBC_DATETIME_LITERALS = { 700 "d": exp.Date, 701 "t": exp.Time, 702 "ts": exp.Timestamp, 703 } 704 705 def _parse_datepart(self) -> exp.Extract: 706 this = self._parse_var() 707 expression = self._match(TokenType.COMMA) and self._parse_bitwise() 708 name = map_date_part(this, self.dialect) 709 710 return self.expression(exp.Extract, this=name, expression=expression) 711 712 def _parse_alter_table_set(self) -> exp.AlterSet: 713 return self._parse_wrapped(super()._parse_alter_table_set) 714 715 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 716 if self._match(TokenType.MERGE): 717 comments = self._prev_comments 718 merge = self._parse_merge() 719 merge.add_comments(comments, prepend=True) 720 return merge 721 722 return super()._parse_wrapped_select(table=table) 723 724 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 725 # We want to use _parse_types() if the first token after :: is a known type, 726 # otherwise we could parse something like x::varchar(max) into a function 727 if self._match_set(self.TYPE_TOKENS, advance=False): 728 return self._parse_types() 729 730 return self._parse_function() or self._parse_types() 731 732 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 733 if not self._match(TokenType.OPTION): 734 return None 735 736 def _parse_option() -> t.Optional[exp.Expression]: 737 option = self._parse_var_from_options(OPTIONS) 738 if not option: 739 return None 740 741 self._match(TokenType.EQ) 742 return self.expression( 743 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 744 ) 745 746 return self._parse_wrapped_csv(_parse_option) 747 748 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 749 this = self._parse_primary_or_var() 750 if self._match(TokenType.L_PAREN, advance=False): 751 expression = self._parse_wrapped(self._parse_string) 752 
else: 753 expression = None 754 755 return exp.XMLKeyValueOption(this=this, expression=expression) 756 757 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 758 if not self._match_pair(TokenType.FOR, TokenType.XML): 759 return None 760 761 def _parse_for_xml() -> t.Optional[exp.Expression]: 762 return self.expression( 763 exp.QueryOption, 764 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 765 or self._parse_xml_key_value_option(), 766 ) 767 768 return self._parse_csv(_parse_for_xml) 769 770 def _parse_projections(self) -> t.List[exp.Expression]: 771 """ 772 T-SQL supports the syntax alias = expression in the SELECT's projection list, 773 so we transform all parsed Selects to convert their EQ projections into Aliases. 774 775 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 776 """ 777 return [ 778 ( 779 exp.alias_(projection.expression, projection.this.this, copy=False) 780 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 781 else projection 782 ) 783 for projection in super()._parse_projections() 784 ] 785 786 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 787 """Applies to SQL Server and Azure SQL Database 788 COMMIT [ { TRAN | TRANSACTION } 789 [ transaction_name | @tran_name_variable ] ] 790 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 791 792 ROLLBACK { TRAN | TRANSACTION } 793 [ transaction_name | @tran_name_variable 794 | savepoint_name | @savepoint_variable ] 795 """ 796 rollback = self._prev.token_type == TokenType.ROLLBACK 797 798 self._match_texts(("TRAN", "TRANSACTION")) 799 this = self._parse_id_var() 800 801 if rollback: 802 return self.expression(exp.Rollback, this=this) 803 804 durability = None 805 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 806 self._match_text_seq("DELAYED_DURABILITY") 807 self._match(TokenType.EQ) 808 809 if self._match_text_seq("OFF"): 810 durability = False 811 else: 812 
self._match(TokenType.ON) 813 durability = True 814 815 self._match_r_paren() 816 817 return self.expression(exp.Commit, this=this, durability=durability) 818 819 def _parse_transaction(self) -> exp.Transaction | exp.Command: 820 """Applies to SQL Server and Azure SQL Database 821 BEGIN { TRAN | TRANSACTION } 822 [ { transaction_name | @tran_name_variable } 823 [ WITH MARK [ 'description' ] ] 824 ] 825 """ 826 if self._match_texts(("TRAN", "TRANSACTION")): 827 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 828 if self._match_text_seq("WITH", "MARK"): 829 transaction.set("mark", self._parse_string()) 830 831 return transaction 832 833 return self._parse_as_command(self._prev) 834 835 def _parse_returns(self) -> exp.ReturnsProperty: 836 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 837 returns = super()._parse_returns() 838 returns.set("table", table) 839 return returns 840 841 def _parse_convert( 842 self, strict: bool, safe: t.Optional[bool] = None 843 ) -> t.Optional[exp.Expression]: 844 this = self._parse_types() 845 self._match(TokenType.COMMA) 846 args = [this, *self._parse_csv(self._parse_assignment)] 847 convert = exp.Convert.from_arg_list(args) 848 convert.set("safe", safe) 849 return convert 850 851 def _parse_column_def( 852 self, this: t.Optional[exp.Expression], computed_column: bool = True 853 ) -> t.Optional[exp.Expression]: 854 this = super()._parse_column_def(this=this, computed_column=computed_column) 855 if not this: 856 return None 857 if self._match(TokenType.EQ): 858 this.set("default", self._parse_disjunction()) 859 if self._match_texts(self.COLUMN_DEFINITION_MODES): 860 this.set("output", self._prev.text) 861 return this 862 863 def _parse_user_defined_function( 864 self, kind: t.Optional[TokenType] = None 865 ) -> t.Optional[exp.Expression]: 866 this = super()._parse_user_defined_function(kind=kind) 867 868 if ( 869 kind == TokenType.FUNCTION 870 or isinstance(this, 
exp.UserDefinedFunction) 871 or self._match(TokenType.ALIAS, advance=False) 872 ): 873 return this 874 875 if not self._match(TokenType.WITH, advance=False): 876 expressions = self._parse_csv(self._parse_function_parameter) 877 else: 878 expressions = None 879 880 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 881 882 def _parse_into(self) -> t.Optional[exp.Into]: 883 into = super()._parse_into() 884 885 table = isinstance(into, exp.Into) and into.find(exp.Table) 886 if isinstance(table, exp.Table): 887 table_identifier = table.this 888 if table_identifier.args.get("temporary"): 889 # Promote the temporary property from the Identifier to the Into expression 890 t.cast(exp.Into, into).set("temporary", True) 891 892 return into 893 894 def _parse_id_var( 895 self, 896 any_token: bool = True, 897 tokens: t.Optional[t.Collection[TokenType]] = None, 898 ) -> t.Optional[exp.Expression]: 899 is_temporary = self._match(TokenType.HASH) 900 is_global = is_temporary and self._match(TokenType.HASH) 901 902 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 903 if this: 904 if is_global: 905 this.set("global_", True) 906 elif is_temporary: 907 this.set("temporary", True) 908 909 return this 910 911 def _parse_create(self) -> exp.Create | exp.Command: 912 create = super()._parse_create() 913 914 if isinstance(create, exp.Create): 915 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 916 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 917 if not create.args.get("properties"): 918 create.set("properties", exp.Properties(expressions=[])) 919 920 create.args["properties"].append("expressions", exp.TemporaryProperty()) 921 922 return create 923 924 def _parse_if(self) -> t.Optional[exp.Expression]: 925 index = self._index 926 927 if self._match_text_seq("OBJECT_ID"): 928 self._parse_wrapped_csv(self._parse_string) 929 if self._match_text_seq("IS", "NOT", "NULL") and 
self._match(TokenType.DROP): 930 return self._parse_drop(exists=True) 931 self._retreat(index) 932 933 return super()._parse_if() 934 935 def _parse_unique(self) -> exp.UniqueColumnConstraint: 936 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 937 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 938 else: 939 this = self._parse_schema(self._parse_id_var(any_token=False)) 940 941 return self.expression(exp.UniqueColumnConstraint, this=this) 942 943 def _parse_update(self) -> exp.Update: 944 expression = super()._parse_update() 945 expression.set("options", self._parse_options()) 946 return expression 947 948 def _parse_partition(self) -> t.Optional[exp.Partition]: 949 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 950 return None 951 952 def parse_range(): 953 low = self._parse_bitwise() 954 high = self._parse_bitwise() if self._match_text_seq("TO") else None 955 956 return ( 957 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 958 ) 959 960 partition = self.expression( 961 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 962 ) 963 964 self._match_r_paren() 965 966 return partition 967 968 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 969 var = self._parse_id_var() 970 if not var: 971 return None 972 973 self._match(TokenType.ALIAS) 974 return self.expression( 975 exp.DeclareItem, 976 this=var, 977 kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(), 978 default=self._match(TokenType.EQ) and self._parse_bitwise(), 979 ) 980 981 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 982 expression = super()._parse_alter_table_alter() 983 984 if expression is not None: 985 collation = expression.args.get("collate") 986 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 987 identifier = collation.this 988 collation.set("this", exp.Var(this=identifier.name)) 989 990 return expression 991 992 def 
_parse_primary_key_part(self) -> t.Optional[exp.Expression]: 993 return self._parse_ordered() 994 995 class Generator(generator.Generator): 996 LIMIT_IS_TOP = True 997 QUERY_HINTS = False 998 RETURNING_END = False 999 NVL2_SUPPORTED = False 1000 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 1001 LIMIT_FETCH = "FETCH" 1002 COMPUTED_COLUMN_WITH_TYPE = False 1003 CTE_RECURSIVE_KEYWORD_REQUIRED = False 1004 ENSURE_BOOLS = True 1005 NULL_ORDERING_SUPPORTED = None 1006 SUPPORTS_SINGLE_ARG_CONCAT = False 1007 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 1008 SUPPORTS_SELECT_INTO = True 1009 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 1010 SUPPORTS_TO_NUMBER = False 1011 SET_OP_MODIFIERS = False 1012 COPY_PARAMS_EQ_REQUIRED = True 1013 PARSE_JSON_NAME = None 1014 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 1015 ALTER_SET_WRAPPED = True 1016 ALTER_SET_TYPE = "" 1017 1018 EXPRESSIONS_WITHOUT_NESTED_CTES = { 1019 exp.Create, 1020 exp.Delete, 1021 exp.Insert, 1022 exp.Intersect, 1023 exp.Except, 1024 exp.Merge, 1025 exp.Select, 1026 exp.Subquery, 1027 exp.Union, 1028 exp.Update, 1029 } 1030 1031 SUPPORTED_JSON_PATH_PARTS = { 1032 exp.JSONPathKey, 1033 exp.JSONPathRoot, 1034 exp.JSONPathSubscript, 1035 } 1036 1037 TYPE_MAPPING = { 1038 **generator.Generator.TYPE_MAPPING, 1039 exp.DataType.Type.BOOLEAN: "BIT", 1040 exp.DataType.Type.DATETIME2: "DATETIME2", 1041 exp.DataType.Type.DECIMAL: "NUMERIC", 1042 exp.DataType.Type.DOUBLE: "FLOAT", 1043 exp.DataType.Type.INT: "INTEGER", 1044 exp.DataType.Type.ROWVERSION: "ROWVERSION", 1045 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 1046 exp.DataType.Type.TIMESTAMP: "DATETIME2", 1047 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 1048 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 1049 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 1050 exp.DataType.Type.UTINYINT: "TINYINT", 1051 exp.DataType.Type.VARIANT: "SQL_VARIANT", 1052 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 1053 } 1054 1055 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 1056 
        TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.AnyValue: any_value_to_max_sql,
            exp.ArrayToString: rename_func("STRING_AGG"),
            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
            exp.Ceil: rename_func("CEILING"),
            exp.Chr: rename_func("CHAR"),
            exp.DateAdd: date_delta_sql("DATEADD"),
            exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
            exp.CurrentDate: rename_func("GETDATE"),
            exp.CurrentTimestamp: rename_func("GETDATE"),
            exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
            exp.GroupConcat: _string_agg_sql,
            exp.If: rename_func("IIF"),
            exp.JSONExtract: _json_extract_sql,
            exp.JSONExtractScalar: _json_extract_sql,
            exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
            exp.Ln: rename_func("LOG"),
            exp.Max: max_or_greatest,
            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
            exp.Min: min_or_least,
            exp.NumberToStr: _format_sql,
            exp.Repeat: rename_func("REPLICATE"),
            exp.CurrentSchema: rename_func("SCHEMA_NAME"),
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_distinct_on,
                    transforms.eliminate_semi_and_anti_joins,
                    transforms.eliminate_qualify,
                    transforms.unnest_generate_date_array_using_recursive_cte,
                ]
            ),
            exp.Stddev: rename_func("STDEV"),
            exp.StrPosition: lambda self, e: strposition_sql(
                self, e, func_name="CHARINDEX", supports_position=True
            ),
            exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
            exp.SHA1Digest: lambda self, e: self.func(
                "HASHBYTES", exp.Literal.string("SHA1"), e.this
            ),
            exp.SHA2: lambda self, e: self.func(
                "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
            ),
            exp.TemporaryProperty: lambda self, e: "",
            exp.TimeStrToTime: _timestrtotime_sql,
            exp.TimeToStr: _format_sql,
            exp.Trim: trim_sql,
            exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
            exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
            exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
            exp.Uuid: lambda *_: "NEWID()",
            exp.DateFromParts: rename_func("DATEFROMPARTS"),
        }

        # RETURNS is generated by returnsproperty_sql below, not the generic transform.
        TRANSFORMS.pop(exp.ReturnsProperty)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def scope_resolution(self, rhs: str, scope_name: str) -> str:
            """Render scope resolution as `scope::member` (T-SQL operator order)."""
            return f"{scope_name}::{rhs}"

        def select_sql(self, expression: exp.Select) -> str:
            """Generate SELECT, reconciling TOP/FETCH/OFFSET restrictions in T-SQL."""
            limit = expression.args.get("limit")
            offset = expression.args.get("offset")

            if isinstance(limit, exp.Fetch) and not offset:
                # Dialects like Oracle can FETCH directly from a row set but
                # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
                offset = exp.Offset(expression=exp.Literal.number(0))
                expression.set("offset", offset)

            if offset:
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

                if isinstance(limit, exp.Limit):
                    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                    # we replace here because otherwise TOP would be generated in select_sql
                    limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            """Generate CONVERT / TRY_CONVERT with the optional style argument."""
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            """Generate one OPTION(...) hint, adding `=` only where T-SQL requires it."""
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            """Map LATERAL joins onto CROSS APPLY / OUTER APPLY where possible."""
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"
        def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
            """Transpile SPLIT_PART to PARSENAME.

            PARSENAME only splits on '.' and counts parts from the RIGHT (max 4),
            so this is possible only for literal arguments with a '.' delimiter;
            the part index is mirrored accordingly.
            """
            this = expression.this
            split_count = len(this.name.split("."))
            delimiter = expression.args.get("delimiter")
            part_index = expression.args.get("part_index")

            if (
                not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
                or (delimiter and delimiter.name != ".")
                or not part_index
                or split_count > 4
            ):
                self.unsupported(
                    "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
                )
                return ""

            return self.func(
                "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
            )

        def extract_sql(self, expression: exp.Extract) -> str:
            """Generate EXTRACT as DATEPART, unmapping canonical part names to T-SQL ones."""
            part = expression.this
            name = DATE_PART_UNMAPPING.get(part.name.upper()) or part

            return self.func("DATEPART", name, expression.expression)

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            """Generate TIMEFROMPARTS; nanoseconds are unsupported and dropped."""
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            # TIMEFROMPARTS requires fractions and precision; default both to 0.
            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)

        def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
            """Generate DATETIMEFROMPARTS; time zones and nanoseconds are dropped."""
            zone = expression.args.get("zone")
            if zone is not None:
                zone.pop()
                self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

            # DATETIMEFROMPARTS requires the milliseconds argument; default it to 0.
            if expression.args.get("milli") is None:
                expression.set("milli", exp.Literal.number(0))

            return rename_func("DATETIMEFROMPARTS")(self, expression)
        def setitem_sql(self, expression: exp.SetItem) -> str:
            """Generate a SET item; T-SQL omits '=' unless the LHS is a variable."""
            this = expression.this
            if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
                # T-SQL does not use '=' in SET command, except when the LHS is a variable.
                return f"{self.sql(this.left)} {self.sql(this.right)}"

            return super().setitem_sql(expression)

        def boolean_sql(self, expression: exp.Boolean) -> str:
            """Render booleans as BIT literals (1/0) or comparison predicates.

            In comparison/projection contexts (BIT_TYPES) and inside VALUES,
            a bare 1/0 is valid; elsewhere T-SQL needs a predicate form.
            """
            if type(expression.parent) in BIT_TYPES or isinstance(
                expression.find_ancestor(exp.Values, exp.Select), exp.Values
            ):
                return "1" if expression.this else "0"

            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            """Generate IS, but use '=' for booleans since T-SQL lacks IS TRUE/FALSE."""
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            """Generate the created object's name, prefixing '#' for temp tables."""
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                # Keep the '#' inside the brackets for quoted names like [name].
                sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            """Generate CREATE with T-SQL specifics.

            Handles: CTAS rewritten as SELECT ... INTO, CREATE VIEW WITH-clause
            placement, IF NOT EXISTS emulated via INFORMATION_SCHEMA/sys checks
            wrapped in EXEC, and OR REPLACE rewritten as OR ALTER.
            """
            kind = expression.kind
            exists = expression.args.get("exists")
            # IF NOT EXISTS is emulated below, so strip it before generation.
            expression.set("exists", None)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            if kind == "VIEW":
                expression.this.set("catalog", None)
                with_ = expression.args.get("with_")
                if ctas_expression and with_:
                    # We've already preprocessed the Create expression to bubble up any nested CTEs,
                    # but CREATE VIEW actually requires the WITH clause to come after it so we need
                    # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                    ctas_expression.set("with_", with_.pop())

            table = expression.find(exp.Table)

            # Convert CTAS statement to SELECT .. INTO ..
            if kind == "TABLE" and ctas_expression:
                if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                    ctas_expression = ctas_expression.subquery()

                properties = expression.args.get("properties") or exp.Properties()
                is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

                select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
                select_into.set("into", exp.Into(this=table, temporary=is_temp))

                if like_property:
                    # CREATE ... LIKE copies structure only, so select zero rows.
                    select_into.limit(0, copy=False)

                sql = self.sql(select_into)
            else:
                sql = super().create_sql(expression)

            if exists:
                # Emulate IF NOT EXISTS by probing the catalog and EXEC-ing the DDL.
                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
                sql_with_ctes = self.prepend_ctes(expression, sql)
                sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
                if kind == "SCHEMA":
                    return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
                elif kind == "TABLE":
                    assert table
                    where = exp.and_(
                        exp.column("TABLE_NAME").eq(table.name),
                        exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                        exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
                    )
                    return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
                elif kind == "INDEX":
                    index = self.sql(exp.Literal.string(expression.this.text("this")))
                    return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
            elif expression.args.get("replace"):
                # T-SQL spells OR REPLACE as OR ALTER.
                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

            return self.prepend_ctes(expression, sql)

        @generator.unsupported_args("unlogged", "expressions")
        def into_sql(self, expression: exp.Into) -> str:
            """Generate INTO, pushing the temporary flag down to the table identifier."""
            if expression.args.get("temporary"):
                # If the Into expression has a temporary property, push this down to the Identifier
                table = expression.find(exp.Table)
                if table and isinstance(table.this, exp.Identifier):
                    table.this.set("temporary", True)

            return f"{self.seg('INTO')} {self.sql(expression, 'this')}"
        def count_sql(self, expression: exp.Count) -> str:
            """Generate COUNT or COUNT_BIG depending on the big_int flag."""
            func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
            return rename_func(func_name)(self, expression)

        def datediff_sql(self, expression: exp.DateDiff) -> str:
            """Generate DATEDIFF or DATEDIFF_BIG depending on the big_int flag."""
            func_name = "DATEDIFF_BIG" if expression.args.get("big_int") else "DATEDIFF"
            return date_delta_sql(func_name)(self, expression)

        def offset_sql(self, expression: exp.Offset) -> str:
            """Generate OFFSET with the mandatory trailing ROWS keyword."""
            return f"{super().offset_sql(expression)} ROWS"

        def version_sql(self, expression: exp.Version) -> str:
            """Generate temporal-table clauses (FOR SYSTEM_TIME ...)."""
            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
            this = f"FOR {name}"
            expr = expression.expression
            kind = expression.text("kind")
            if kind in ("FROM", "BETWEEN"):
                # FROM pairs with TO, BETWEEN pairs with AND.
                args = expr.expressions
                sep = "TO" if kind == "FROM" else "AND"
                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
            else:
                expr_sql = self.sql(expr)

            expr_sql = f" {expr_sql}" if expr_sql else ""
            return f"{this} {kind}{expr_sql}"

        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
            """Generate RETURNS [table-var] <type> for function definitions."""
            table = expression.args.get("table")
            table = f"{table} " if table else ""
            return f"RETURNS {table}{self.sql(expression, 'this')}"

        def returning_sql(self, expression: exp.Returning) -> str:
            """Generate RETURNING as T-SQL's OUTPUT [... INTO target]."""
            into = self.sql(expression, "into")
            into = self.seg(f"INTO {into}") if into else ""
            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

        def transaction_sql(self, expression: exp.Transaction) -> str:
            """Generate BEGIN TRANSACTION [name] [WITH MARK 'description']."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            mark = self.sql(expression, "mark")
            mark = f" WITH MARK {mark}" if mark else ""
            return f"BEGIN TRANSACTION{this}{mark}"
        def commit_sql(self, expression: exp.Commit) -> str:
            """Generate COMMIT TRANSACTION [name] [WITH (DELAYED_DURABILITY = ON|OFF)]."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            durability = expression.args.get("durability")
            durability = (
                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
                if durability is not None
                else ""
            )
            return f"COMMIT TRANSACTION{this}{durability}"

        def rollback_sql(self, expression: exp.Rollback) -> str:
            """Generate ROLLBACK TRANSACTION [name]."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            return f"ROLLBACK TRANSACTION{this}"

        def identifier_sql(self, expression: exp.Identifier) -> str:
            """Generate an identifier, prefixing ## for global and # for local temps."""
            identifier = super().identifier_sql(expression)

            if expression.args.get("global_"):
                identifier = f"##{identifier}"
            elif expression.args.get("temporary"):
                identifier = f"#{identifier}"

            return identifier

        def constraint_sql(self, expression: exp.Constraint) -> str:
            """Generate CONSTRAINT name <constraints>, space-separated (no commas)."""
            this = self.sql(expression, "this")
            expressions = self.expressions(expression, flat=True, sep=" ")
            return f"CONSTRAINT {this} {expressions}"

        def length_sql(self, expression: exp.Length) -> str:
            return self._uncast_text(expression, "LEN")

        def right_sql(self, expression: exp.Right) -> str:
            return self._uncast_text(expression, "RIGHT")

        def left_sql(self, expression: exp.Left) -> str:
            return self._uncast_text(expression, "LEFT")

        def _uncast_text(self, expression: exp.Expression, name: str) -> str:
            """Generate `name(...)`, dropping a redundant cast-to-TEXT on the argument.

            The parser wraps these functions' args in a TEXT cast (see
            _build_with_arg_as_text); unwrap it so we round-trip cleanly.
            """
            this = expression.this
            if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
                this_sql = self.sql(this, "this")
            else:
                this_sql = self.sql(this)
            expression_sql = self.sql(expression, "expression")
            return self.func(name, this_sql, expression_sql if expression_sql else None)
        def partition_sql(self, expression: exp.Partition) -> str:
            """Generate `WITH (PARTITIONS(...))` for partition specifications."""
            return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

        def alter_sql(self, expression: exp.Alter) -> str:
            """Generate ALTER; renames become `EXEC sp_rename 'old', 'new'`."""
            action = seq_get(expression.args.get("actions") or [], 0)
            if isinstance(action, exp.AlterRename):
                return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
            return super().alter_sql(expression)

        def drop_sql(self, expression: exp.Drop) -> str:
            """Generate DROP; views cannot be catalog-qualified in T-SQL."""
            if expression.args["kind"] == "VIEW":
                expression.this.set("catalog", None)
            return super().drop_sql(expression)

        def options_modifier(self, expression: exp.Expression) -> str:
            """Generate the trailing OPTION(...) hint clause, if any."""
            options = self.expressions(expression, key="options")
            return f" OPTION{self.wrap(options)}" if options else ""

        def dpipe_sql(self, expression: exp.DPipe) -> str:
            """Generate `||` concatenation as chained `+` operators."""
            return self.sql(
                reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
            )

        def isascii_sql(self, expression: exp.IsAscii) -> str:
            """Emulate IS_ASCII via PATINDEX with a binary-collated non-ASCII pattern.

            0x255b5e002d7f5d25 is the pattern '%[^\\x00-\\x7F]%' as raw bytes; a
            PATINDEX of 0 means no non-ASCII character was found.
            """
            return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

        def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
            """Generate a column definition with optional `= default` and OUT/OUTPUT/READONLY."""
            this = super().columndef_sql(expression, sep)
            default = self.sql(expression, "default")
            default = f" = {default}" if default else ""
            output = self.sql(expression, "output")
            output = f" {output}" if output else ""
            return f"{this}{default}{output}"

        def coalesce_sql(self, expression: exp.Coalesce) -> str:
            """Generate ISNULL when the node was parsed from ISNULL, else COALESCE."""
            func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
            return rename_func(func_name)(self, expression)
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression
Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
class TSQL(Dialect):
    """Dialect definition for Microsoft SQL Server / Azure SQL (T-SQL)."""

    SUPPORTS_SEMI_ANTI_JOIN = False
    LOG_BASE_FIRST = False
    TYPED_DIVISION = True
    CONCAT_COALESCE = True
    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE
    ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False

    TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"

    EXPRESSION_METADATA = EXPRESSION_METADATA.copy()

    # T-SQL date-part abbreviations mapped to their canonical part names.
    DATE_PART_MAPPING = {
        **Dialect.DATE_PART_MAPPING,
        "QQ": "QUARTER",
        "M": "MONTH",
        "Y": "DAYOFYEAR",
        "WW": "WEEK",
        "N": "MINUTE",
        "SS": "SECOND",
        "MCS": "MICROSECOND",
        "TZOFFSET": "TIMEZONE_MINUTE",
        "TZ": "TIMEZONE_MINUTE",
        "ISO_WEEK": "WEEKISO",
        "ISOWK": "WEEKISO",
        "ISOWW": "WEEKISO",
    }

    # T-SQL format tokens mapped to strftime-style directives.
    TIME_MAPPING = {
        "year": "%Y",
        "dayofyear": "%j",
        "day": "%d",
        "dy": "%d",
        "y": "%Y",
        "week": "%W",
        "ww": "%W",
        "wk": "%W",
        "isowk": "%V",
        "isoww": "%V",
        "iso_week": "%V",
        "hour": "%h",
        "hh": "%I",
        "minute": "%M",
        "mi": "%M",
        "n": "%M",
        "second": "%S",
        "ss": "%S",
        "s": "%-S",
        "millisecond": "%f",
        "ms": "%f",
        "weekday": "%w",
        "dw": "%w",
        "month": "%m",
        "mm": "%M",
        "m": "%-M",
        "Y": "%Y",
        "YYYY": "%Y",
        "YY": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dddd": "%A",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "h": "%-I",
        "ffffff": "%f",
        "yyyy": "%Y",
        "yy": "%y",
    }

    # CONVERT() numeric style codes mapped to strftime-style formats.
    CONVERT_FORMAT_MAPPING = {
        "0": "%b %d %Y %-I:%M%p",
        "1": "%m/%d/%y",
        "2": "%y.%m.%d",
        "3": "%d/%m/%y",
        "4": "%d.%m.%y",
        "5": "%d-%m-%y",
        "6": "%d %b %y",
        "7": "%b %d, %y",
        "8": "%H:%M:%S",
        "9": "%b %d %Y %-I:%M:%S:%f%p",
        "10": "mm-dd-yy",
        "11": "yy/mm/dd",
        "12": "yymmdd",
        "13": "%d %b %Y %H:%M:ss:%f",
        "14": "%H:%M:%S:%f",
        "20": "%Y-%m-%d %H:%M:%S",
        "21": "%Y-%m-%d %H:%M:%S.%f",
        "22": "%m/%d/%y %-I:%M:%S %p",
        "23": "%Y-%m-%d",
        "24": "%H:%M:%S",
        "25": "%Y-%m-%d %H:%M:%S.%f",
        "100": "%b %d %Y %-I:%M%p",
        "101": "%m/%d/%Y",
        "102": "%Y.%m.%d",
        "103": "%d/%m/%Y",
        "104": "%d.%m.%Y",
        "105": "%d-%m-%Y",
        "106": "%d %b %Y",
        "107": "%b %d, %Y",
        "108": "%H:%M:%S",
        "109": "%b %d %Y %-I:%M:%S:%f%p",
        "110": "%m-%d-%Y",
        "111": "%Y/%m/%d",
        "112": "%Y%m%d",
        "113": "%d %b %Y %H:%M:%S:%f",
        "114": "%H:%M:%S:%f",
        "120": "%Y-%m-%d %H:%M:%S",
        "121": "%Y-%m-%d %H:%M:%S.%f",
        "126": "%Y-%m-%dT%H:%M:%S.%f",
    }

    # FORMAT() single-letter standard format strings mapped to strftime patterns.
    FORMAT_TIME_MAPPING = {
        "y": "%B %Y",
        "d": "%m/%d/%Y",
        "H": "%-H",
        "h": "%-I",
        "s": "%Y-%m-%d %H:%M:%S",
        "D": "%A,%B,%Y",
        "f": "%A,%B,%Y %-I:%M %p",
        "F": "%A,%B,%Y %-I:%M:%S %p",
        "g": "%m/%d/%Y %-I:%M %p",
        "G": "%m/%d/%Y %-I:%M:%S %p",
        "M": "%B %-d",
        "m": "%B %-d",
        "O": "%Y-%m-%dT%H:%M:%S",
        "u": "%Y-%M-%D %H:%M:%S%z",
        "U": "%A, %B %D, %Y %H:%M:%S%z",
        "T": "%-I:%M:%S %p",
        "t": "%-I:%M",
        "Y": "%a %Y",
    }

    class Tokenizer(tokens.Tokenizer):
        IDENTIFIERS = [("[", "]"), '"']
        QUOTES = ["'", '"']
        HEX_STRINGS = [("0x", ""), ("0X", "")]
        VAR_SINGLE_TOKENS = {"@", "$", "#"}

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "CLUSTERED INDEX": TokenType.INDEX,
            "DATETIME2": TokenType.DATETIME2,
            "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
            "DECLARE": TokenType.DECLARE,
            "EXEC": TokenType.COMMAND,
            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
            "GO": TokenType.COMMAND,
            "IMAGE": TokenType.IMAGE,
            "MONEY": TokenType.MONEY,
            "NONCLUSTERED INDEX": TokenType.INDEX,
            "NTEXT": TokenType.TEXT,
            "OPTION": TokenType.OPTION,
            "OUTPUT": TokenType.RETURNING,
            "PRINT": TokenType.COMMAND,
            "PROC": TokenType.PROCEDURE,
            "REAL": TokenType.FLOAT,
            "ROWVERSION": TokenType.ROWVERSION,
            "SMALLDATETIME": TokenType.SMALLDATETIME,
            "SMALLMONEY": TokenType.SMALLMONEY,
            "SQL_VARIANT": TokenType.VARIANT,
            "SYSTEM_USER": TokenType.CURRENT_USER,
            "TOP": TokenType.TOP,
            "TIMESTAMP": TokenType.ROWVERSION,
            "TINYINT": TokenType.UTINYINT,
            "UNIQUEIDENTIFIER": TokenType.UUID,
            "UPDATE STATISTICS": TokenType.COMMAND,
            "XML": TokenType.XML,
        }
        # T-SQL has no Oracle-style /*+ hint comments.
        KEYWORDS.pop("/*+")

        COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}

    class Parser(parser.Parser):
        SET_REQUIRES_ASSIGNMENT_DELIMITER = False
        LOG_DEFAULTS_TO_LN = True
        STRING_ALIASES = True
        NO_PAREN_IF_COMMANDS = False

        QUERY_MODIFIER_PARSERS = {
            **parser.Parser.QUERY_MODIFIER_PARSERS,
            TokenType.OPTION: lambda self: ("options", self._parse_options()),
            TokenType.FOR: lambda self: ("for_", self._parse_for()),
        }

        # T-SQL does not allow BEGIN to be used as an identifier
        ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN}
        ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN}
        TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
        COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
        UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN}
        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            # CHARINDEX(substr, string, [start]) -> StrPosition(string, substr, start)
            "CHARINDEX": lambda args: exp.StrPosition(
                this=seq_get(args, 1),
                substr=seq_get(args, 0),
                position=seq_get(args, 2),
            ),
            "COUNT": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=False
            ),
            "COUNT_BIG": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=True
            ),
            "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
            "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
            "DATEDIFF_BIG": _build_date_delta(
                exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL, big_int=True
            ),
            "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
            "DATETIMEFROMPARTS": _build_datetimefromparts,
            "EOMONTH": _build_eomonth,
            "FORMAT": _build_format,
            "GETDATE": exp.CurrentTimestamp.from_arg_list,
            "HASHBYTES": _build_hashbytes,
            "ISNULL": lambda args: build_coalesce(args=args, is_null=True),
            "JSON_QUERY": _build_json_query,
            "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
            "LEN": _build_with_arg_as_text(exp.Length),
            "LEFT": _build_with_arg_as_text(exp.Left),
            "NEWID": exp.Uuid.from_arg_list,
            "RIGHT": _build_with_arg_as_text(exp.Right),
            "PARSENAME": _build_parsename,
            "REPLICATE": exp.Repeat.from_arg_list,
            "SCHEMA_NAME": exp.CurrentSchema.from_arg_list,
            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
            "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
            "SUSER_NAME": exp.CurrentUser.from_arg_list,
            "SUSER_SNAME": exp.CurrentUser.from_arg_list,
            "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list,
            "SYSTEM_USER": exp.CurrentUser.from_arg_list,
            "TIMEFROMPARTS": _build_timefromparts,
            "DATETRUNC": _build_datetrunc,
        }

        JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

        PROCEDURE_OPTIONS = dict.fromkeys(
            ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple()
        )

        COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"}

        RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
            TokenType.TABLE,
            *parser.Parser.TYPE_TOKENS,
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,
            TokenType.DECLARE: lambda self: self._parse_declare(),
        }

        RANGE_PARSERS = {
            **parser.Parser.RANGE_PARSERS,
            TokenType.DCOLON: lambda self, this: self.expression(
                exp.ScopeResolution,
                this=this,
                expression=self._parse_function() or self._parse_var(any_token=True),
            ),
        }

        NO_PAREN_FUNCTION_PARSERS = {
            **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
            "NEXT": lambda self: self._parse_next_value_for(),
        }

        FUNCTION_PARSERS: t.Dict[str, t.Callable] = {
            **parser.Parser.FUNCTION_PARSERS,
            "JSON_ARRAYAGG": lambda self: self.expression(
                exp.JSONArrayAgg,
                this=self._parse_bitwise(),
                order=self._parse_order(),
                null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"),
            ),
            "DATEPART": lambda self: self._parse_datepart(),
        }

        # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
        COLUMN_OPERATORS = {
            **parser.Parser.COLUMN_OPERATORS,
            TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
            if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
            else self.expression(exp.ScopeResolution, this=this, expression=to),
        }

        SET_OP_MODIFIERS = {"offset"}

        # ODBC escape literals {d '...'}, {t '...'}, {ts '...'}.
        ODBC_DATETIME_LITERALS = {
            "d": exp.Date,
            "t": exp.Time,
            "ts": exp.Timestamp,
        }

        def _parse_datepart(self) -> exp.Extract:
            """Parse DATEPART(part, expr), canonicalizing the part name."""
            this = self._parse_var()
            expression = self._match(TokenType.COMMA) and self._parse_bitwise()
            name = map_date_part(this, self.dialect)

            return self.expression(exp.Extract, this=name, expression=expression)

        def _parse_alter_table_set(self) -> exp.AlterSet:
            # T-SQL wraps ALTER ... SET options in parentheses.
            return self._parse_wrapped(super()._parse_alter_table_set)

        def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]:
            """Parse a parenthesized select, also allowing a wrapped MERGE."""
            if self._match(TokenType.MERGE):
                comments = self._prev_comments
                merge = self._parse_merge()
                merge.add_comments(comments, prepend=True)
                return merge

            return super()._parse_wrapped_select(table=table)

        def _parse_dcolon(self) -> t.Optional[exp.Expression]:
            # We want to use _parse_types() if the first token after :: is a known type,
            # otherwise we could parse something like x::varchar(max) into a function
            if self._match_set(self.TYPE_TOKENS, advance=False):
                return self._parse_types()

            return self._parse_function() or self._parse_types()

        def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
            """Parse a trailing OPTION(<hint> [= value], ...) clause."""
            if not self._match(TokenType.OPTION):
                return None

            def _parse_option() -> t.Optional[exp.Expression]:
                option = self._parse_var_from_options(OPTIONS)
                if not option:
                    return None

                # Some options take `= value`; the EQ is optional here.
                self._match(TokenType.EQ)
                return self.expression(
                    exp.QueryOption, this=option, expression=self._parse_primary_or_var()
                )

            return self._parse_wrapped_csv(_parse_option)

        def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption:
            """Parse a FOR XML option, optionally with a parenthesized string value."""
            this = self._parse_primary_or_var()
            if self._match(TokenType.L_PAREN, advance=False):
                expression = self._parse_wrapped(self._parse_string)
            else:
                expression = None

            return exp.XMLKeyValueOption(this=this, expression=expression)

        def _parse_for(self) -> t.Optional[t.List[exp.Expression]]:
            """Parse the FOR XML query modifier as a list of QueryOptions."""
            if not self._match_pair(TokenType.FOR, TokenType.XML):
                return None

            def _parse_for_xml() -> t.Optional[exp.Expression]:
                return self.expression(
                    exp.QueryOption,
                    this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False)
                    or self._parse_xml_key_value_option(),
                )

            return self._parse_csv(_parse_for_xml)
        def _parse_projections(self) -> t.List[exp.Expression]:
            """
            T-SQL supports the syntax alias = expression in the SELECT's projection list,
            so we transform all parsed Selects to convert their EQ projections into Aliases.

            See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
            """
            return [
                (
                    exp.alias_(projection.expression, projection.this.this, copy=False)
                    if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                    else projection
                )
                for projection in super()._parse_projections()
            ]

        def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
            """Applies to SQL Server and Azure SQL Database
            COMMIT [ { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable ] ]
                [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

            ROLLBACK { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable
                | savepoint_name | @savepoint_variable ]
            """
            rollback = self._prev.token_type == TokenType.ROLLBACK

            self._match_texts(("TRAN", "TRANSACTION"))
            this = self._parse_id_var()

            if rollback:
                return self.expression(exp.Rollback, this=this)

            durability = None
            if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
                self._match_text_seq("DELAYED_DURABILITY")
                self._match(TokenType.EQ)

                if self._match_text_seq("OFF"):
                    durability = False
                else:
                    self._match(TokenType.ON)
                    durability = True

                self._match_r_paren()

            return self.expression(exp.Commit, this=this, durability=durability)

        def _parse_transaction(self) -> exp.Transaction | exp.Command:
            """Applies to SQL Server and Azure SQL Database
            BEGIN { TRAN | TRANSACTION }
              [ { transaction_name | @tran_name_variable }
              [ WITH MARK [ 'description' ] ]
              ]
            """
            if self._match_texts(("TRAN", "TRANSACTION")):
                transaction = self.expression(exp.Transaction, this=self._parse_id_var())
                if self._match_text_seq("WITH", "MARK"):
                    transaction.set("mark", self._parse_string())

                return transaction

            return self._parse_as_command(self._prev)

        def _parse_returns(self) -> exp.ReturnsProperty:
            """Parse RETURNS [@table_var] <type>, capturing the table variable name."""
            table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
            returns = super()._parse_returns()
            returns.set("table", table)
            return returns

        def _parse_convert(
            self, strict: bool, safe: t.Optional[bool] = None
        ) -> t.Optional[exp.Expression]:
            """Parse CONVERT(type, expr [, style]) / TRY_CONVERT."""
            this = self._parse_types()
            self._match(TokenType.COMMA)
            args = [this, *self._parse_csv(self._parse_assignment)]
            convert = exp.Convert.from_arg_list(args)
            convert.set("safe", safe)
            return convert

        def _parse_column_def(
            self, this: t.Optional[exp.Expression], computed_column: bool = True
        ) -> t.Optional[exp.Expression]:
            """Parse a column/parameter definition with `= default` and OUT/OUTPUT/READONLY."""
            this = super()._parse_column_def(this=this, computed_column=computed_column)
            if not this:
                return None
            if self._match(TokenType.EQ):
                this.set("default", self._parse_disjunction())
            if self._match_texts(self.COLUMN_DEFINITION_MODES):
                this.set("output", self._prev.text)
            return this

        def _parse_user_defined_function(
            self, kind: t.Optional[TokenType] = None
        ) -> t.Optional[exp.Expression]:
            """Parse a procedure/function head, allowing unparenthesized parameters."""
            this = super()._parse_user_defined_function(kind=kind)

            if (
                kind == TokenType.FUNCTION
                or isinstance(this, exp.UserDefinedFunction)
                or self._match(TokenType.ALIAS, advance=False)
            ):
                return this

            # Procedures may list parameters without parentheses, up to a WITH clause.
            if not self._match(TokenType.WITH, advance=False):
                expressions = self._parse_csv(self._parse_function_parameter)
            else:
                expressions = None

            return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

        def _parse_into(self) -> t.Optional[exp.Into]:
            """Parse SELECT ... INTO, promoting a #temp marker to the Into node."""
            into = super()._parse_into()

            table = isinstance(into, exp.Into) and into.find(exp.Table)
            if isinstance(table, exp.Table):
                table_identifier = table.this
                if table_identifier.args.get("temporary"):
                    # Promote the temporary property from the Identifier to the Into expression
                    t.cast(exp.Into, into).set("temporary", True)

            return into

        def _parse_id_var(
            self,
            any_token: bool = True,
            tokens: t.Optional[t.Collection[TokenType]] = None,
        ) -> t.Optional[exp.Expression]:
            """Parse an identifier, recognizing # (temp) and ## (global temp) prefixes."""
            is_temporary = self._match(TokenType.HASH)
            is_global = is_temporary and self._match(TokenType.HASH)

            this = super()._parse_id_var(any_token=any_token, tokens=tokens)
            if this:
                if is_global:
                    this.set("global_", True)
                elif is_temporary:
                    this.set("temporary", True)

            return this

        def _parse_create(self) -> exp.Create | exp.Command:
            """Parse CREATE; #temp table names imply a TemporaryProperty."""
            create = super()._parse_create()

            if isinstance(create, exp.Create):
                table = create.this.this if isinstance(create.this, exp.Schema) else create.this
                if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
                    if not create.args.get("properties"):
                        create.set("properties", exp.Properties(expressions=[]))

                    create.args["properties"].append("expressions", exp.TemporaryProperty())

            return create

        def _parse_if(self) -> t.Optional[exp.Expression]:
            """Recognize `IF OBJECT_ID('...') IS NOT NULL DROP ...` as DROP IF EXISTS."""
            index = self._index

            if self._match_text_seq("OBJECT_ID"):
                self._parse_wrapped_csv(self._parse_string)
                if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                    return self._parse_drop(exists=True)

            self._retreat(index)

            return super()._parse_if()

        def _parse_unique(self) -> exp.UniqueColumnConstraint:
            """Parse a UNIQUE constraint, allowing the CLUSTERED/NONCLUSTERED prefix."""
            if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
                this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
            else:
                this = self._parse_schema(self._parse_id_var(any_token=False))

            return self.expression(exp.UniqueColumnConstraint, this=this)

        def _parse_update(self) -> exp.Update:
            """Parse UPDATE, then attach any trailing OPTION(...) query hints."""
            expression = super()._parse_update()
            expression.set("options", self._parse_options())
            return expression

        def _parse_partition(self) -> t.Optional[exp.Partition]:
            """Parse `WITH (PARTITIONS (...))`, supporting `<low> TO <high>` ranges."""
            if not self._match_text_seq("WITH", "(", "PARTITIONS"):
                return None

            def parse_range():
                low = self._parse_bitwise()
                high = self._parse_bitwise() if self._match_text_seq("TO") else None

                return (
                    self.expression(exp.PartitionRange, this=low, expression=high) if high else low
                )

            partition = self.expression(
                exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
            )

            self._match_r_paren()

            return partition
None 952 953 def parse_range(): 954 low = self._parse_bitwise() 955 high = self._parse_bitwise() if self._match_text_seq("TO") else None 956 957 return ( 958 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 959 ) 960 961 partition = self.expression( 962 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 963 ) 964 965 self._match_r_paren() 966 967 return partition 968 969 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 970 var = self._parse_id_var() 971 if not var: 972 return None 973 974 self._match(TokenType.ALIAS) 975 return self.expression( 976 exp.DeclareItem, 977 this=var, 978 kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(), 979 default=self._match(TokenType.EQ) and self._parse_bitwise(), 980 ) 981 982 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 983 expression = super()._parse_alter_table_alter() 984 985 if expression is not None: 986 collation = expression.args.get("collate") 987 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 988 identifier = collation.this 989 collation.set("this", exp.Var(this=identifier.name)) 990 991 return expression 992 993 def _parse_primary_key_part(self) -> t.Optional[exp.Expression]: 994 return self._parse_ordered() 995 996 class Generator(generator.Generator): 997 LIMIT_IS_TOP = True 998 QUERY_HINTS = False 999 RETURNING_END = False 1000 NVL2_SUPPORTED = False 1001 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 1002 LIMIT_FETCH = "FETCH" 1003 COMPUTED_COLUMN_WITH_TYPE = False 1004 CTE_RECURSIVE_KEYWORD_REQUIRED = False 1005 ENSURE_BOOLS = True 1006 NULL_ORDERING_SUPPORTED = None 1007 SUPPORTS_SINGLE_ARG_CONCAT = False 1008 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 1009 SUPPORTS_SELECT_INTO = True 1010 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 1011 SUPPORTS_TO_NUMBER = False 1012 SET_OP_MODIFIERS = False 1013 COPY_PARAMS_EQ_REQUIRED = True 1014 PARSE_JSON_NAME = None 1015 
EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 1016 ALTER_SET_WRAPPED = True 1017 ALTER_SET_TYPE = "" 1018 1019 EXPRESSIONS_WITHOUT_NESTED_CTES = { 1020 exp.Create, 1021 exp.Delete, 1022 exp.Insert, 1023 exp.Intersect, 1024 exp.Except, 1025 exp.Merge, 1026 exp.Select, 1027 exp.Subquery, 1028 exp.Union, 1029 exp.Update, 1030 } 1031 1032 SUPPORTED_JSON_PATH_PARTS = { 1033 exp.JSONPathKey, 1034 exp.JSONPathRoot, 1035 exp.JSONPathSubscript, 1036 } 1037 1038 TYPE_MAPPING = { 1039 **generator.Generator.TYPE_MAPPING, 1040 exp.DataType.Type.BOOLEAN: "BIT", 1041 exp.DataType.Type.DATETIME2: "DATETIME2", 1042 exp.DataType.Type.DECIMAL: "NUMERIC", 1043 exp.DataType.Type.DOUBLE: "FLOAT", 1044 exp.DataType.Type.INT: "INTEGER", 1045 exp.DataType.Type.ROWVERSION: "ROWVERSION", 1046 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 1047 exp.DataType.Type.TIMESTAMP: "DATETIME2", 1048 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 1049 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 1050 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 1051 exp.DataType.Type.UTINYINT: "TINYINT", 1052 exp.DataType.Type.VARIANT: "SQL_VARIANT", 1053 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 1054 } 1055 1056 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 1057 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 1058 1059 TRANSFORMS = { 1060 **generator.Generator.TRANSFORMS, 1061 exp.AnyValue: any_value_to_max_sql, 1062 exp.ArrayToString: rename_func("STRING_AGG"), 1063 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 1064 exp.Ceil: rename_func("CEILING"), 1065 exp.Chr: rename_func("CHAR"), 1066 exp.DateAdd: date_delta_sql("DATEADD"), 1067 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 1068 exp.CurrentDate: rename_func("GETDATE"), 1069 exp.CurrentTimestamp: rename_func("GETDATE"), 1070 exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"), 1071 exp.DateStrToDate: datestrtodate_sql, 1072 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 1073 exp.GroupConcat: 
_string_agg_sql, 1074 exp.If: rename_func("IIF"), 1075 exp.JSONExtract: _json_extract_sql, 1076 exp.JSONExtractScalar: _json_extract_sql, 1077 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1078 exp.Ln: rename_func("LOG"), 1079 exp.Max: max_or_greatest, 1080 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 1081 exp.Min: min_or_least, 1082 exp.NumberToStr: _format_sql, 1083 exp.Repeat: rename_func("REPLICATE"), 1084 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 1085 exp.Select: transforms.preprocess( 1086 [ 1087 transforms.eliminate_distinct_on, 1088 transforms.eliminate_semi_and_anti_joins, 1089 transforms.eliminate_qualify, 1090 transforms.unnest_generate_date_array_using_recursive_cte, 1091 ] 1092 ), 1093 exp.Stddev: rename_func("STDEV"), 1094 exp.StrPosition: lambda self, e: strposition_sql( 1095 self, e, func_name="CHARINDEX", supports_position=True 1096 ), 1097 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 1098 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 1099 exp.SHA1Digest: lambda self, e: self.func( 1100 "HASHBYTES", exp.Literal.string("SHA1"), e.this 1101 ), 1102 exp.SHA2: lambda self, e: self.func( 1103 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 1104 ), 1105 exp.TemporaryProperty: lambda self, e: "", 1106 exp.TimeStrToTime: _timestrtotime_sql, 1107 exp.TimeToStr: _format_sql, 1108 exp.Trim: trim_sql, 1109 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 1110 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 1111 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 1112 exp.Uuid: lambda *_: "NEWID()", 1113 exp.DateFromParts: rename_func("DATEFROMPARTS"), 1114 } 1115 1116 TRANSFORMS.pop(exp.ReturnsProperty) 1117 1118 PROPERTIES_LOCATION = { 1119 **generator.Generator.PROPERTIES_LOCATION, 1120 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 1121 } 1122 1123 def scope_resolution(self, 
rhs: str, scope_name: str) -> str: 1124 return f"{scope_name}::{rhs}" 1125 1126 def select_sql(self, expression: exp.Select) -> str: 1127 limit = expression.args.get("limit") 1128 offset = expression.args.get("offset") 1129 1130 if isinstance(limit, exp.Fetch) and not offset: 1131 # Dialects like Oracle can FETCH directly from a row set but 1132 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1133 offset = exp.Offset(expression=exp.Literal.number(0)) 1134 expression.set("offset", offset) 1135 1136 if offset: 1137 if not expression.args.get("order"): 1138 # ORDER BY is required in order to use OFFSET in a query, so we use 1139 # a noop order by, since we don't really care about the order. 1140 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1141 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1142 1143 if isinstance(limit, exp.Limit): 1144 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1145 # we replace here because otherwise TOP would be generated in select_sql 1146 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1147 1148 return super().select_sql(expression) 1149 1150 def convert_sql(self, expression: exp.Convert) -> str: 1151 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1152 return self.func( 1153 name, expression.this, expression.expression, expression.args.get("style") 1154 ) 1155 1156 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1157 option = self.sql(expression, "this") 1158 value = self.sql(expression, "expression") 1159 if value: 1160 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1161 return f"{option} {optional_equal_sign}{value}" 1162 return option 1163 1164 def lateral_op(self, expression: exp.Lateral) -> str: 1165 cross_apply = expression.args.get("cross_apply") 1166 if cross_apply is True: 1167 return "CROSS APPLY" 1168 if cross_apply is False: 1169 return "OUTER APPLY" 1170 1171 # 
TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1172 self.unsupported("LATERAL clause is not supported.") 1173 return "LATERAL" 1174 1175 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1176 this = expression.this 1177 split_count = len(this.name.split(".")) 1178 delimiter = expression.args.get("delimiter") 1179 part_index = expression.args.get("part_index") 1180 1181 if ( 1182 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1183 or (delimiter and delimiter.name != ".") 1184 or not part_index 1185 or split_count > 4 1186 ): 1187 self.unsupported( 1188 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1189 ) 1190 return "" 1191 1192 return self.func( 1193 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1194 ) 1195 1196 def extract_sql(self, expression: exp.Extract) -> str: 1197 part = expression.this 1198 name = DATE_PART_UNMAPPING.get(part.name.upper()) or part 1199 1200 return self.func("DATEPART", name, expression.expression) 1201 1202 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1203 nano = expression.args.get("nano") 1204 if nano is not None: 1205 nano.pop() 1206 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1207 1208 if expression.args.get("fractions") is None: 1209 expression.set("fractions", exp.Literal.number(0)) 1210 if expression.args.get("precision") is None: 1211 expression.set("precision", exp.Literal.number(0)) 1212 1213 return rename_func("TIMEFROMPARTS")(self, expression) 1214 1215 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1216 zone = expression.args.get("zone") 1217 if zone is not None: 1218 zone.pop() 1219 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1220 1221 nano = expression.args.get("nano") 1222 if nano is not None: 1223 nano.pop() 1224 self.unsupported("Specifying nanoseconds is 
not supported in DATETIMEFROMPARTS.") 1225 1226 if expression.args.get("milli") is None: 1227 expression.set("milli", exp.Literal.number(0)) 1228 1229 return rename_func("DATETIMEFROMPARTS")(self, expression) 1230 1231 def setitem_sql(self, expression: exp.SetItem) -> str: 1232 this = expression.this 1233 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1234 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1235 return f"{self.sql(this.left)} {self.sql(this.right)}" 1236 1237 return super().setitem_sql(expression) 1238 1239 def boolean_sql(self, expression: exp.Boolean) -> str: 1240 if type(expression.parent) in BIT_TYPES or isinstance( 1241 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1242 ): 1243 return "1" if expression.this else "0" 1244 1245 return "(1 = 1)" if expression.this else "(1 = 0)" 1246 1247 def is_sql(self, expression: exp.Is) -> str: 1248 if isinstance(expression.expression, exp.Boolean): 1249 return self.binary(expression, "=") 1250 return self.binary(expression, "IS") 1251 1252 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1253 sql = self.sql(expression, "this") 1254 properties = expression.args.get("properties") 1255 1256 if sql[:1] != "#" and any( 1257 isinstance(prop, exp.TemporaryProperty) 1258 for prop in (properties.expressions if properties else []) 1259 ): 1260 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1261 1262 return sql 1263 1264 def create_sql(self, expression: exp.Create) -> str: 1265 kind = expression.kind 1266 exists = expression.args.get("exists") 1267 expression.set("exists", None) 1268 1269 like_property = expression.find(exp.LikeProperty) 1270 if like_property: 1271 ctas_expression = like_property.this 1272 else: 1273 ctas_expression = expression.expression 1274 1275 if kind == "VIEW": 1276 expression.this.set("catalog", None) 1277 with_ = expression.args.get("with_") 1278 if ctas_expression and with_: 1279 # 
We've already preprocessed the Create expression to bubble up any nested CTEs, 1280 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1281 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1282 ctas_expression.set("with_", with_.pop()) 1283 1284 table = expression.find(exp.Table) 1285 1286 # Convert CTAS statement to SELECT .. INTO .. 1287 if kind == "TABLE" and ctas_expression: 1288 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1289 ctas_expression = ctas_expression.subquery() 1290 1291 properties = expression.args.get("properties") or exp.Properties() 1292 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1293 1294 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1295 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1296 1297 if like_property: 1298 select_into.limit(0, copy=False) 1299 1300 sql = self.sql(select_into) 1301 else: 1302 sql = super().create_sql(expression) 1303 1304 if exists: 1305 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1306 sql_with_ctes = self.prepend_ctes(expression, sql) 1307 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1308 if kind == "SCHEMA": 1309 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1310 elif kind == "TABLE": 1311 assert table 1312 where = exp.and_( 1313 exp.column("TABLE_NAME").eq(table.name), 1314 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1315 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1316 ) 1317 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1318 elif kind == "INDEX": 1319 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1320 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND 
name = {index}) EXEC({sql_literal})""" 1321 elif expression.args.get("replace"): 1322 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1323 1324 return self.prepend_ctes(expression, sql) 1325 1326 @generator.unsupported_args("unlogged", "expressions") 1327 def into_sql(self, expression: exp.Into) -> str: 1328 if expression.args.get("temporary"): 1329 # If the Into expression has a temporary property, push this down to the Identifier 1330 table = expression.find(exp.Table) 1331 if table and isinstance(table.this, exp.Identifier): 1332 table.this.set("temporary", True) 1333 1334 return f"{self.seg('INTO')} {self.sql(expression, 'this')}" 1335 1336 def count_sql(self, expression: exp.Count) -> str: 1337 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1338 return rename_func(func_name)(self, expression) 1339 1340 def datediff_sql(self, expression: exp.DateDiff) -> str: 1341 func_name = "DATEDIFF_BIG" if expression.args.get("big_int") else "DATEDIFF" 1342 return date_delta_sql(func_name)(self, expression) 1343 1344 def offset_sql(self, expression: exp.Offset) -> str: 1345 return f"{super().offset_sql(expression)} ROWS" 1346 1347 def version_sql(self, expression: exp.Version) -> str: 1348 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1349 this = f"FOR {name}" 1350 expr = expression.expression 1351 kind = expression.text("kind") 1352 if kind in ("FROM", "BETWEEN"): 1353 args = expr.expressions 1354 sep = "TO" if kind == "FROM" else "AND" 1355 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1356 else: 1357 expr_sql = self.sql(expr) 1358 1359 expr_sql = f" {expr_sql}" if expr_sql else "" 1360 return f"{this} {kind}{expr_sql}" 1361 1362 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1363 table = expression.args.get("table") 1364 table = f"{table} " if table else "" 1365 return f"RETURNS {table}{self.sql(expression, 'this')}" 1366 1367 def 
returning_sql(self, expression: exp.Returning) -> str: 1368 into = self.sql(expression, "into") 1369 into = self.seg(f"INTO {into}") if into else "" 1370 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1371 1372 def transaction_sql(self, expression: exp.Transaction) -> str: 1373 this = self.sql(expression, "this") 1374 this = f" {this}" if this else "" 1375 mark = self.sql(expression, "mark") 1376 mark = f" WITH MARK {mark}" if mark else "" 1377 return f"BEGIN TRANSACTION{this}{mark}" 1378 1379 def commit_sql(self, expression: exp.Commit) -> str: 1380 this = self.sql(expression, "this") 1381 this = f" {this}" if this else "" 1382 durability = expression.args.get("durability") 1383 durability = ( 1384 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1385 if durability is not None 1386 else "" 1387 ) 1388 return f"COMMIT TRANSACTION{this}{durability}" 1389 1390 def rollback_sql(self, expression: exp.Rollback) -> str: 1391 this = self.sql(expression, "this") 1392 this = f" {this}" if this else "" 1393 return f"ROLLBACK TRANSACTION{this}" 1394 1395 def identifier_sql(self, expression: exp.Identifier) -> str: 1396 identifier = super().identifier_sql(expression) 1397 1398 if expression.args.get("global_"): 1399 identifier = f"##{identifier}" 1400 elif expression.args.get("temporary"): 1401 identifier = f"#{identifier}" 1402 1403 return identifier 1404 1405 def constraint_sql(self, expression: exp.Constraint) -> str: 1406 this = self.sql(expression, "this") 1407 expressions = self.expressions(expression, flat=True, sep=" ") 1408 return f"CONSTRAINT {this} {expressions}" 1409 1410 def length_sql(self, expression: exp.Length) -> str: 1411 return self._uncast_text(expression, "LEN") 1412 1413 def right_sql(self, expression: exp.Right) -> str: 1414 return self._uncast_text(expression, "RIGHT") 1415 1416 def left_sql(self, expression: exp.Left) -> str: 1417 return self._uncast_text(expression, "LEFT") 1418 1419 def 
_uncast_text(self, expression: exp.Expression, name: str) -> str: 1420 this = expression.this 1421 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1422 this_sql = self.sql(this, "this") 1423 else: 1424 this_sql = self.sql(this) 1425 expression_sql = self.sql(expression, "expression") 1426 return self.func(name, this_sql, expression_sql if expression_sql else None) 1427 1428 def partition_sql(self, expression: exp.Partition) -> str: 1429 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1430 1431 def alter_sql(self, expression: exp.Alter) -> str: 1432 action = seq_get(expression.args.get("actions") or [], 0) 1433 if isinstance(action, exp.AlterRename): 1434 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1435 return super().alter_sql(expression) 1436 1437 def drop_sql(self, expression: exp.Drop) -> str: 1438 if expression.args["kind"] == "VIEW": 1439 expression.this.set("catalog", None) 1440 return super().drop_sql(expression) 1441 1442 def options_modifier(self, expression: exp.Expression) -> str: 1443 options = self.expressions(expression, key="options") 1444 return f" OPTION{self.wrap(options)}" if options else "" 1445 1446 def dpipe_sql(self, expression: exp.DPipe) -> str: 1447 return self.sql( 1448 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1449 ) 1450 1451 def isascii_sql(self, expression: exp.IsAscii) -> str: 1452 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1453 1454 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1455 this = super().columndef_sql(expression, sep) 1456 default = self.sql(expression, "default") 1457 default = f" = {default}" if default else "" 1458 output = self.sql(expression, "output") 1459 output = f" {output}" if output else "" 1460 return f"{this}{default}{output}" 1461 1462 def coalesce_sql(self, expression: exp.Coalesce) -> str: 
1463 func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE" 1464 return rename_func(func_name)(self, expression)
Whether the base comes first in the LOG function.
Possible values: True, False, None — where None means the two-argument form of LOG is not supported, i.e. an explicit base cannot be passed.
Whether the behavior of a / b depends on the types of a and b.
False means a / b is always float division.
True means a / b is integer division when both a and b are integers, and float division otherwise.
A NULL arg in CONCAT yields NULL by default, but in some dialects it yields an empty string.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python strftime formats.
class Tokenizer(tokens.Tokenizer):
    """Tokenizer for the T-SQL dialect (SQL Server / Azure SQL Database)."""

    # T-SQL quotes identifiers with square brackets or double quotes.
    IDENTIFIERS = [("[", "]"), '"']
    # Double quotes may also delimit strings (QUOTED_IDENTIFIER OFF style).
    QUOTES = ["'", '"']
    # Hex literals are written 0xABCD with no closing delimiter.
    HEX_STRINGS = [("0x", ""), ("0X", "")]
    # @ (variables), $ and # (temp-object prefix) may appear inside identifiers/vars.
    VAR_SINGLE_TOKENS = {"@", "$", "#"}

    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
        "CLUSTERED INDEX": TokenType.INDEX,
        "DATETIME2": TokenType.DATETIME2,
        "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
        "DECLARE": TokenType.DECLARE,
        "EXEC": TokenType.COMMAND,
        "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
        "GO": TokenType.COMMAND,  # batch separator, passed through as a command
        "IMAGE": TokenType.IMAGE,
        "MONEY": TokenType.MONEY,
        "NONCLUSTERED INDEX": TokenType.INDEX,
        "NTEXT": TokenType.TEXT,
        "OPTION": TokenType.OPTION,
        "OUTPUT": TokenType.RETURNING,  # OUTPUT clause plays RETURNING's role
        "PRINT": TokenType.COMMAND,
        "PROC": TokenType.PROCEDURE,
        "REAL": TokenType.FLOAT,
        "ROWVERSION": TokenType.ROWVERSION,
        "SMALLDATETIME": TokenType.SMALLDATETIME,
        "SMALLMONEY": TokenType.SMALLMONEY,
        "SQL_VARIANT": TokenType.VARIANT,
        "SYSTEM_USER": TokenType.CURRENT_USER,
        "TOP": TokenType.TOP,
        # T-SQL's TIMESTAMP type is tokenized as ROWVERSION (they appear to be
        # treated as the same type here -- per Microsoft docs TIMESTAMP is a
        # deprecated synonym for ROWVERSION).
        "TIMESTAMP": TokenType.ROWVERSION,
        # T-SQL TINYINT is mapped to the unsigned tinyint token (0..255 range).
        "TINYINT": TokenType.UTINYINT,
        "UNIQUEIDENTIFIER": TokenType.UUID,
        "UPDATE STATISTICS": TokenType.COMMAND,
        "XML": TokenType.XML,
    }
    # Drop the inherited "/*+" hint-comment keyword; T-SQL does not use
    # Oracle-style hint comments (query hints go in the OPTION clause instead).
    KEYWORDS.pop("/*+")

    # END also starts a command-like construct in T-SQL (e.g. closing BEGIN blocks).
    COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- ESCAPE_FOLLOW_CHARS
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- use_rs_tokenizer
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
584 class Parser(parser.Parser): 585 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 586 LOG_DEFAULTS_TO_LN = True 587 STRING_ALIASES = True 588 NO_PAREN_IF_COMMANDS = False 589 590 QUERY_MODIFIER_PARSERS = { 591 **parser.Parser.QUERY_MODIFIER_PARSERS, 592 TokenType.OPTION: lambda self: ("options", self._parse_options()), 593 TokenType.FOR: lambda self: ("for_", self._parse_for()), 594 } 595 596 # T-SQL does not allow BEGIN to be used as an identifier 597 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 598 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 599 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 600 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 601 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 602 603 FUNCTIONS = { 604 **parser.Parser.FUNCTIONS, 605 "CHARINDEX": lambda args: exp.StrPosition( 606 this=seq_get(args, 1), 607 substr=seq_get(args, 0), 608 position=seq_get(args, 2), 609 ), 610 "COUNT": lambda args: exp.Count( 611 this=seq_get(args, 0), expressions=args[1:], big_int=False 612 ), 613 "COUNT_BIG": lambda args: exp.Count( 614 this=seq_get(args, 0), expressions=args[1:], big_int=True 615 ), 616 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 617 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 618 "DATEDIFF_BIG": _build_date_delta( 619 exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL, big_int=True 620 ), 621 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 622 "DATETIMEFROMPARTS": _build_datetimefromparts, 623 "EOMONTH": _build_eomonth, 624 "FORMAT": _build_format, 625 "GETDATE": exp.CurrentTimestamp.from_arg_list, 626 "HASHBYTES": _build_hashbytes, 627 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 628 "JSON_QUERY": _build_json_query, 629 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 630 "LEN": 
_build_with_arg_as_text(exp.Length), 631 "LEFT": _build_with_arg_as_text(exp.Left), 632 "NEWID": exp.Uuid.from_arg_list, 633 "RIGHT": _build_with_arg_as_text(exp.Right), 634 "PARSENAME": _build_parsename, 635 "REPLICATE": exp.Repeat.from_arg_list, 636 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 637 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 638 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 639 "SUSER_NAME": exp.CurrentUser.from_arg_list, 640 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 641 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 642 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 643 "TIMEFROMPARTS": _build_timefromparts, 644 "DATETRUNC": _build_datetrunc, 645 } 646 647 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 648 649 PROCEDURE_OPTIONS = dict.fromkeys( 650 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 651 ) 652 653 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 654 655 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 656 TokenType.TABLE, 657 *parser.Parser.TYPE_TOKENS, 658 } 659 660 STATEMENT_PARSERS = { 661 **parser.Parser.STATEMENT_PARSERS, 662 TokenType.DECLARE: lambda self: self._parse_declare(), 663 } 664 665 RANGE_PARSERS = { 666 **parser.Parser.RANGE_PARSERS, 667 TokenType.DCOLON: lambda self, this: self.expression( 668 exp.ScopeResolution, 669 this=this, 670 expression=self._parse_function() or self._parse_var(any_token=True), 671 ), 672 } 673 674 NO_PAREN_FUNCTION_PARSERS = { 675 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 676 "NEXT": lambda self: self._parse_next_value_for(), 677 } 678 679 FUNCTION_PARSERS: t.Dict[str, t.Callable] = { 680 **parser.Parser.FUNCTION_PARSERS, 681 "JSON_ARRAYAGG": lambda self: self.expression( 682 exp.JSONArrayAgg, 683 this=self._parse_bitwise(), 684 order=self._parse_order(), 685 null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"), 686 ), 687 "DATEPART": lambda self: 
self._parse_datepart(), 688 } 689 690 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 691 COLUMN_OPERATORS = { 692 **parser.Parser.COLUMN_OPERATORS, 693 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 694 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 695 else self.expression(exp.ScopeResolution, this=this, expression=to), 696 } 697 698 SET_OP_MODIFIERS = {"offset"} 699 700 ODBC_DATETIME_LITERALS = { 701 "d": exp.Date, 702 "t": exp.Time, 703 "ts": exp.Timestamp, 704 } 705 706 def _parse_datepart(self) -> exp.Extract: 707 this = self._parse_var() 708 expression = self._match(TokenType.COMMA) and self._parse_bitwise() 709 name = map_date_part(this, self.dialect) 710 711 return self.expression(exp.Extract, this=name, expression=expression) 712 713 def _parse_alter_table_set(self) -> exp.AlterSet: 714 return self._parse_wrapped(super()._parse_alter_table_set) 715 716 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 717 if self._match(TokenType.MERGE): 718 comments = self._prev_comments 719 merge = self._parse_merge() 720 merge.add_comments(comments, prepend=True) 721 return merge 722 723 return super()._parse_wrapped_select(table=table) 724 725 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 726 # We want to use _parse_types() if the first token after :: is a known type, 727 # otherwise we could parse something like x::varchar(max) into a function 728 if self._match_set(self.TYPE_TOKENS, advance=False): 729 return self._parse_types() 730 731 return self._parse_function() or self._parse_types() 732 733 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 734 if not self._match(TokenType.OPTION): 735 return None 736 737 def _parse_option() -> t.Optional[exp.Expression]: 738 option = self._parse_var_from_options(OPTIONS) 739 if not option: 740 return None 741 742 self._match(TokenType.EQ) 743 return 
    # NOTE(review): the next lines are the tail of a parser method whose `def`
    # line precedes this chunk (it builds one exp.QueryOption per entry and
    # consumes a wrapped, comma-separated option list). Kept verbatim.
            self.expression(
                exp.QueryOption, this=option, expression=self._parse_primary_or_var()
            )

        return self._parse_wrapped_csv(_parse_option)

    def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption:
        """Parse a single FOR XML option: a bare name or NAME('string value')."""
        this = self._parse_primary_or_var()
        # Peek (advance=False) for "(" — a parenthesized string value follows.
        if self._match(TokenType.L_PAREN, advance=False):
            expression = self._parse_wrapped(self._parse_string)
        else:
            expression = None

        return exp.XMLKeyValueOption(this=this, expression=expression)

    def _parse_for(self) -> t.Optional[t.List[exp.Expression]]:
        """Parse a T-SQL FOR XML clause; returns None if FOR XML is not next."""
        if not self._match_pair(TokenType.FOR, TokenType.XML):
            return None

        def _parse_for_xml() -> t.Optional[exp.Expression]:
            # Try the known keyword options first, then fall back to the
            # generic NAME('value') form.
            return self.expression(
                exp.QueryOption,
                this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False)
                or self._parse_xml_key_value_option(),
            )

        return self._parse_csv(_parse_for_xml)

    def _parse_projections(self) -> t.List[exp.Expression]:
        """
        T-SQL supports the syntax alias = expression in the SELECT's projection list,
        so we transform all parsed Selects to convert their EQ projections into Aliases.

        See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
        """
        return [
            (
                exp.alias_(projection.expression, projection.this.this, copy=False)
                if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                else projection
            )
            for projection in super()._parse_projections()
        ]

    def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
        """Applies to SQL Server and Azure SQL Database
        COMMIT [ { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable ] ]
            [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

        ROLLBACK { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable
            | savepoint_name | @savepoint_variable ]
        """
        # The COMMIT/ROLLBACK keyword was already consumed by the caller;
        # inspect the previous token to know which statement this is.
        rollback = self._prev.token_type == TokenType.ROLLBACK

        self._match_texts(("TRAN", "TRANSACTION"))
        this = self._parse_id_var()

        if rollback:
            return self.expression(exp.Rollback, this=this)

        durability = None
        if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
            self._match_text_seq("DELAYED_DURABILITY")
            self._match(TokenType.EQ)

            # DELAYED_DURABILITY is a two-valued switch: OFF -> False, ON -> True.
            if self._match_text_seq("OFF"):
                durability = False
            else:
                self._match(TokenType.ON)
                durability = True

            self._match_r_paren()

        return self.expression(exp.Commit, this=this, durability=durability)

    def _parse_transaction(self) -> exp.Transaction | exp.Command:
        """Applies to SQL Server and Azure SQL Database
        BEGIN { TRAN | TRANSACTION }
          [ { transaction_name | @tran_name_variable }
          [ WITH MARK [ 'description' ] ]
          ]
        """
        if self._match_texts(("TRAN", "TRANSACTION")):
            transaction = self.expression(exp.Transaction, this=self._parse_id_var())
            if self._match_text_seq("WITH", "MARK"):
                transaction.set("mark", self._parse_string())

            return transaction

        # Not a transaction statement we model — fall back to a raw Command.
        return self._parse_as_command(self._prev)

    def _parse_returns(self) -> exp.ReturnsProperty:
        """Parse RETURNS, capturing the optional table variable name that
        T-SQL allows before the return type (RETURNS @tbl TABLE (...))."""
        table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
        returns = super()._parse_returns()
        returns.set("table", table)
        return returns

    def _parse_convert(
        self, strict: bool, safe: t.Optional[bool] = None
    ) -> t.Optional[exp.Expression]:
        """Parse CONVERT/TRY_CONVERT arguments; the target type comes first."""
        this = self._parse_types()
        self._match(TokenType.COMMA)
        args = [this, *self._parse_csv(self._parse_assignment)]
        convert = exp.Convert.from_arg_list(args)
        # `safe` distinguishes TRY_CONVERT from CONVERT.
        convert.set("safe", safe)
        return convert

    def _parse_column_def(
        self, this: t.Optional[exp.Expression], computed_column: bool = True
    ) -> t.Optional[exp.Expression]:
        """Extend column-def parsing with `= default` and procedure-parameter
        output modes (see COLUMN_DEFINITION_MODES)."""
        this = super()._parse_column_def(this=this, computed_column=computed_column)
        if not this:
            return None
        if self._match(TokenType.EQ):
            this.set("default", self._parse_disjunction())
        if self._match_texts(self.COLUMN_DEFINITION_MODES):
            this.set("output", self._prev.text)
        return this

    def _parse_user_defined_function(
        self, kind: t.Optional[TokenType] = None
    ) -> t.Optional[exp.Expression]:
        this = super()._parse_user_defined_function(kind=kind)

        # Already fully parsed (or about to hit AS) — nothing more to consume.
        if (
            kind == TokenType.FUNCTION
            or isinstance(this, exp.UserDefinedFunction)
            or self._match(TokenType.ALIAS, advance=False)
        ):
            return this

        # T-SQL procedures may list parameters without parentheses; parse them
        # unless a WITH options clause starts instead.
        if not self._match(TokenType.WITH, advance=False):
            expressions = self._parse_csv(self._parse_function_parameter)
        else:
            expressions = None

        return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

    def _parse_into(self) -> t.Optional[exp.Into]:
        into = super()._parse_into()

        table = isinstance(into, exp.Into) and into.find(exp.Table)
        if isinstance(table, exp.Table):
            table_identifier = table.this
            if table_identifier.args.get("temporary"):
                # Promote the temporary property from the Identifier to the Into expression
                t.cast(exp.Into, into).set("temporary", True)

        return into

    def _parse_id_var(
        self,
        any_token: bool = True,
        tokens: t.Optional[t.Collection[TokenType]] = None,
    ) -> t.Optional[exp.Expression]:
        """Parse an identifier, recognizing the T-SQL temp-table prefixes:
        `#name` (temporary) and `##name` (global temporary)."""
        is_temporary = self._match(TokenType.HASH)
        is_global = is_temporary and self._match(TokenType.HASH)

        this = super()._parse_id_var(any_token=any_token, tokens=tokens)
        if this:
            if is_global:
                this.set("global_", True)
            elif is_temporary:
                this.set("temporary", True)

        return this

    def _parse_create(self) -> exp.Create | exp.Command:
        create = super()._parse_create()

        if isinstance(create, exp.Create):
            # Promote a `#temp` table name into a TemporaryProperty on the
            # CREATE itself, so downstream generation sees it as a property.
            table = create.this.this if isinstance(create.this, exp.Schema) else create.this
            if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
                if not create.args.get("properties"):
                    create.set("properties", exp.Properties(expressions=[]))

                create.args["properties"].append("expressions", exp.TemporaryProperty())

        return create

    def _parse_if(self) -> t.Optional[exp.Expression]:
        index = self._index

        # Recognize the idiom `IF OBJECT_ID('x') IS NOT NULL DROP ...` and
        # fold it into a DROP ... IF EXISTS; otherwise rewind and parse a
        # regular IF.
        if self._match_text_seq("OBJECT_ID"):
            self._parse_wrapped_csv(self._parse_string)
            if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                return self._parse_drop(exists=True)
        self._retreat(index)

        return super()._parse_if()

    def _parse_unique(self) -> exp.UniqueColumnConstraint:
        """Parse UNIQUE, optionally with a CLUSTERED/NONCLUSTERED modifier."""
        if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
            this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
        else:
            this = self._parse_schema(self._parse_id_var(any_token=False))

        return self.expression(exp.UniqueColumnConstraint, this=this)

    def _parse_update(self) -> exp.Update:
        """Parse UPDATE, plus T-SQL's trailing OPTION(...) query hints."""
        expression = super()._parse_update()
        expression.set("options", self._parse_options())
        return expression

    def _parse_partition(self) -> t.Optional[exp.Partition]:
        """Parse `WITH (PARTITIONS (<n> [TO <m>], ...))`."""
        if not self._match_text_seq("WITH", "(", "PARTITIONS"):
            return None

        def parse_range():
            low = self._parse_bitwise()
            high = self._parse_bitwise() if self._match_text_seq("TO") else None

            # `low TO high` becomes a PartitionRange; a lone value stays as-is.
            return (
                self.expression(exp.PartitionRange, this=low, expression=high) if high else low
            )

        partition = self.expression(
            exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
        )

        self._match_r_paren()

        return partition

    def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
        """Parse one item of a DECLARE: @var [AS] <type or TABLE schema> [= default]."""
        var = self._parse_id_var()
        if not var:
            return None

        self._match(TokenType.ALIAS)
        return self.expression(
            exp.DeclareItem,
            this=var,
            kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(),
            # Short-circuits to False (no default) when "=" is absent.
            default=self._match(TokenType.EQ) and self._parse_bitwise(),
        )

    def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]:
        expression = super()._parse_alter_table_alter()

        if expression is not None:
            # Collation names parse as Columns; normalize the inner Identifier
            # to a Var so it is generated unquoted.
            collation = expression.args.get("collate")
            if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier):
                identifier = collation.this
                collation.set("this", exp.Var(this=identifier.name))

        return expression

    def _parse_primary_key_part(self) -> t.Optional[exp.Expression]:
        # Parsed as an ordered expression — T-SQL permits a sort direction on
        # primary-key columns.
        return self._parse_ordered()
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- COLON_PLACEHOLDER_TOKENS
- ARRAY_CONSTRUCTORS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- CAST_COLUMN_OPERATORS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PIPE_SYNTAX_TRANSFORM_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- QUERY_MODIFIER_TOKENS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- WINDOW_EXCLUDE_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- SET_ASSIGNMENT_DELIMITERS
- ANALYZE_STYLES
- ANALYZE_EXPRESSION_PARSERS
- PARTITION_KEYWORDS
- AMBIGUOUS_ALIAS_TOKENS
- OPERATION_MODIFIERS
- RECURSIVE_CTE_SEARCH_KIND
- MODIFIABLES
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- OPTIONAL_ALIAS_TOKEN_CTE
- ALTER_RENAME_REQUIRES_COLUMN
- ALTER_TABLE_PARTITIONS
- JOINS_HAVE_EQUAL_PRECEDENCE
- ZONE_AWARE_TIMESTAMP_CONSTRUCTOR
- MAP_KEYS_ARE_ARBITRARY_EXPRESSIONS
- JSON_EXTRACT_REQUIRES_JSON_EXPRESSION
- ADD_JOIN_ON_TRUE
- SUPPORTS_OMITTED_INTERVAL_SPAN_UNIT
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- parse_set_operation
- build_cast
- errors
- sql
class Generator(generator.Generator):
    """SQL generator for the T-SQL (SQL Server / Azure SQL) dialect."""

    LIMIT_IS_TOP = True  # LIMIT is rendered as TOP
    QUERY_HINTS = False
    RETURNING_END = False
    NVL2_SUPPORTED = False
    ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
    LIMIT_FETCH = "FETCH"
    COMPUTED_COLUMN_WITH_TYPE = False
    CTE_RECURSIVE_KEYWORD_REQUIRED = False
    ENSURE_BOOLS = True
    NULL_ORDERING_SUPPORTED = None
    SUPPORTS_SINGLE_ARG_CONCAT = False
    TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
    SUPPORTS_SELECT_INTO = True
    JSON_PATH_BRACKETED_KEY_SUPPORTED = False
    SUPPORTS_TO_NUMBER = False
    SET_OP_MODIFIERS = False
    COPY_PARAMS_EQ_REQUIRED = True
    PARSE_JSON_NAME = None
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
    ALTER_SET_WRAPPED = True
    ALTER_SET_TYPE = ""

    # Expression types whose nested CTEs must be bubbled up (T-SQL only
    # allows WITH at the top of a statement).
    EXPRESSIONS_WITHOUT_NESTED_CTES = {
        exp.Create,
        exp.Delete,
        exp.Insert,
        exp.Intersect,
        exp.Except,
        exp.Merge,
        exp.Select,
        exp.Subquery,
        exp.Union,
        exp.Update,
    }

    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BOOLEAN: "BIT",
        exp.DataType.Type.DATETIME2: "DATETIME2",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DOUBLE: "FLOAT",
        exp.DataType.Type.INT: "INTEGER",
        exp.DataType.Type.ROWVERSION: "ROWVERSION",
        exp.DataType.Type.TEXT: "VARCHAR(MAX)",
        exp.DataType.Type.TIMESTAMP: "DATETIME2",
        exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
        exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
        exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
        exp.DataType.Type.UTINYINT: "TINYINT",
        exp.DataType.Type.VARIANT: "SQL_VARIANT",
        exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
    }

    # NCHAR/NVARCHAR are native T-SQL types — generate them as-is.
    TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
    TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.AnyValue: any_value_to_max_sql,
        exp.ArrayToString: rename_func("STRING_AGG"),
        exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
        exp.Ceil: rename_func("CEILING"),
        exp.Chr: rename_func("CHAR"),
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
        exp.CurrentDate: rename_func("GETDATE"),
        exp.CurrentTimestamp: rename_func("GETDATE"),
        exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
        exp.GroupConcat: _string_agg_sql,
        exp.If: rename_func("IIF"),
        exp.JSONExtract: _json_extract_sql,
        exp.JSONExtractScalar: _json_extract_sql,
        exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
        exp.Ln: rename_func("LOG"),
        exp.Max: max_or_greatest,
        exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
        exp.Min: min_or_least,
        exp.NumberToStr: _format_sql,
        exp.Repeat: rename_func("REPLICATE"),
        exp.CurrentSchema: rename_func("SCHEMA_NAME"),
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_distinct_on,
                transforms.eliminate_semi_and_anti_joins,
                transforms.eliminate_qualify,
                transforms.unnest_generate_date_array_using_recursive_cte,
            ]
        ),
        exp.Stddev: rename_func("STDEV"),
        exp.StrPosition: lambda self, e: strposition_sql(
            self, e, func_name="CHARINDEX", supports_position=True
        ),
        exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
        exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
        exp.SHA1Digest: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string("SHA1"), e.this
        ),
        exp.SHA2: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
        ),
        exp.TemporaryProperty: lambda self, e: "",
        exp.TimeStrToTime: _timestrtotime_sql,
        exp.TimeToStr: _format_sql,
        exp.Trim: trim_sql,
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
        exp.Uuid: lambda *_: "NEWID()",
        exp.DateFromParts: rename_func("DATEFROMPARTS"),
    }

    # RETURNS is handled by returnsproperty_sql below, not a generic transform.
    TRANSFORMS.pop(exp.ReturnsProperty)

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    def scope_resolution(self, rhs: str, scope_name: str) -> str:
        """Render scope resolution as `scope::member`."""
        return f"{scope_name}::{rhs}"

    def select_sql(self, expression: exp.Select) -> str:
        limit = expression.args.get("limit")
        offset = expression.args.get("offset")

        if isinstance(limit, exp.Fetch) and not offset:
            # Dialects like Oracle can FETCH directly from a row set but
            # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
            offset = exp.Offset(expression=exp.Literal.number(0))
            expression.set("offset", offset)

        if offset:
            if not expression.args.get("order"):
                # ORDER BY is required in order to use OFFSET in a query, so we use
                # a noop order by, since we don't really care about the order.
                # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, we need to use FETCH instead of TOP
                # we replace here because otherwise TOP would be generated in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

        return super().select_sql(expression)

    def convert_sql(self, expression: exp.Convert) -> str:
        """Generate CONVERT, or TRY_CONVERT when the `safe` flag is set."""
        name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
        return self.func(
            name, expression.this, expression.expression, expression.args.get("style")
        )

    def queryoption_sql(self, expression: exp.QueryOption) -> str:
        option = self.sql(expression, "this")
        value = self.sql(expression, "expression")
        if value:
            # Some options (see OPTIONS_THAT_REQUIRE_EQUAL) must be written
            # with an "=" between name and value.
            optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
            return f"{option} {optional_equal_sign}{value}"
        return option

    def lateral_op(self, expression: exp.Lateral) -> str:
        cross_apply = expression.args.get("cross_apply")
        if cross_apply is True:
            return "CROSS APPLY"
        if cross_apply is False:
            return "OUTER APPLY"

        # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
        self.unsupported("LATERAL clause is not supported.")
        return "LATERAL"

    def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
        """Transpile SPLIT_PART to PARSENAME (only valid for '.' delimiters,
        literal arguments, and at most 4 parts — PARSENAME's limit)."""
        this = expression.this
        split_count = len(this.name.split("."))
        delimiter = expression.args.get("delimiter")
        part_index = expression.args.get("part_index")

        if (
            not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
            or (delimiter and delimiter.name != ".")
            or not part_index
            or split_count > 4
        ):
            self.unsupported(
                "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
            )
            return ""

        # PARSENAME counts parts from the right, SPLIT_PART from the left —
        # hence the index inversion.
        return self.func(
            "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
        )

    def extract_sql(self, expression: exp.Extract) -> str:
        """Generate EXTRACT as DATEPART, un-mapping normalized part names."""
        part = expression.this
        name = DATE_PART_UNMAPPING.get(part.name.upper()) or part

        return self.func("DATEPART", name, expression.expression)

    def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

        # TIMEFROMPARTS requires all arguments — fill missing ones with 0.
        if expression.args.get("fractions") is None:
            expression.set("fractions", exp.Literal.number(0))
        if expression.args.get("precision") is None:
            expression.set("precision", exp.Literal.number(0))

        return rename_func("TIMEFROMPARTS")(self, expression)

    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
        zone = expression.args.get("zone")
        if zone is not None:
            zone.pop()
            self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

        # DATETIMEFROMPARTS requires the milliseconds argument.
        if expression.args.get("milli") is None:
            expression.set("milli", exp.Literal.number(0))

        return rename_func("DATETIMEFROMPARTS")(self, expression)

    def setitem_sql(self, expression: exp.SetItem) -> str:
        this = expression.this
        if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
            # T-SQL does not use '=' in SET command, except when the LHS is a variable.
            return f"{self.sql(this.left)} {self.sql(this.right)}"

        return super().setitem_sql(expression)

    def boolean_sql(self, expression: exp.Boolean) -> str:
        # BIT contexts (comparisons, projections, VALUES rows) take 1/0;
        # elsewhere a boolean must be spelled as a predicate.
        if type(expression.parent) in BIT_TYPES or isinstance(
            expression.find_ancestor(exp.Values, exp.Select), exp.Values
        ):
            return "1" if expression.this else "0"

        return "(1 = 1)" if expression.this else "(1 = 0)"

    def is_sql(self, expression: exp.Is) -> str:
        # `x IS TRUE/FALSE` has no T-SQL equivalent — compare against the bit.
        if isinstance(expression.expression, exp.Boolean):
            return self.binary(expression, "=")
        return self.binary(expression, "IS")

    def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
        sql = self.sql(expression, "this")
        properties = expression.args.get("properties")

        # Temporary tables must be spelled with a leading '#'; add it if the
        # name does not already carry one (handling bracket quoting).
        if sql[:1] != "#" and any(
            isinstance(prop, exp.TemporaryProperty)
            for prop in (properties.expressions if properties else [])
        ):
            sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

        return sql

    def create_sql(self, expression: exp.Create) -> str:
        kind = expression.kind
        exists = expression.args.get("exists")
        expression.set("exists", None)

        like_property = expression.find(exp.LikeProperty)
        if like_property:
            ctas_expression = like_property.this
        else:
            ctas_expression = expression.expression

        if kind == "VIEW":
            expression.this.set("catalog", None)
            with_ = expression.args.get("with_")
            if ctas_expression and with_:
                # We've already preprocessed the Create expression to bubble up any nested CTEs,
                # but CREATE VIEW actually requires the WITH clause to come after it so we need
                # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                ctas_expression.set("with_", with_.pop())

        table = expression.find(exp.Table)

        # Convert CTAS statement to SELECT .. INTO ..
        if kind == "TABLE" and ctas_expression:
            if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                ctas_expression = ctas_expression.subquery()

            properties = expression.args.get("properties") or exp.Properties()
            is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

            select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
            select_into.set("into", exp.Into(this=table, temporary=is_temp))

            if like_property:
                # CREATE TABLE ... LIKE copies structure only — take no rows.
                select_into.limit(0, copy=False)

            sql = self.sql(select_into)
        else:
            sql = super().create_sql(expression)

        if exists:
            # Emulate IF NOT EXISTS via a metadata lookup guarding an EXEC of
            # the statement text.
            identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
            sql_with_ctes = self.prepend_ctes(expression, sql)
            sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
            if kind == "SCHEMA":
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
            elif kind == "TABLE":
                assert table
                where = exp.and_(
                    exp.column("TABLE_NAME").eq(table.name),
                    exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                    exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
                )
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
            elif kind == "INDEX":
                index = self.sql(exp.Literal.string(expression.this.text("this")))
                return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
        elif expression.args.get("replace"):
            sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

        return self.prepend_ctes(expression, sql)

    @generator.unsupported_args("unlogged", "expressions")
    def into_sql(self, expression: exp.Into) -> str:
        if expression.args.get("temporary"):
            # If the Into expression has a temporary property, push this down to the Identifier
            table = expression.find(exp.Table)
            if table and isinstance(table.this, exp.Identifier):
                table.this.set("temporary", True)

        return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

    def count_sql(self, expression: exp.Count) -> str:
        func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
        return rename_func(func_name)(self, expression)

    def datediff_sql(self, expression: exp.DateDiff) -> str:
        func_name = "DATEDIFF_BIG" if expression.args.get("big_int") else "DATEDIFF"
        return date_delta_sql(func_name)(self, expression)

    def offset_sql(self, expression: exp.Offset) -> str:
        # T-SQL requires the ROWS keyword after OFFSET.
        return f"{super().offset_sql(expression)} ROWS"

    def version_sql(self, expression: exp.Version) -> str:
        """Generate a temporal (FOR SYSTEM_TIME ...) clause."""
        name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
        this = f"FOR {name}"
        expr = expression.expression
        kind = expression.text("kind")
        if kind in ("FROM", "BETWEEN"):
            args = expr.expressions
            sep = "TO" if kind == "FROM" else "AND"
            expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
        else:
            expr_sql = self.sql(expr)

        expr_sql = f" {expr_sql}" if expr_sql else ""
        return f"{this} {kind}{expr_sql}"

    def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
        table = expression.args.get("table")
        table = f"{table} " if table else ""
        return f"RETURNS {table}{self.sql(expression, 'this')}"

    def returning_sql(self, expression: exp.Returning) -> str:
        # RETURNING maps to T-SQL's OUTPUT clause, optionally with INTO.
        into = self.sql(expression, "into")
        into = self.seg(f"INTO {into}") if into else ""
        return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

    def transaction_sql(self, expression: exp.Transaction) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        mark = self.sql(expression, "mark")
        mark = f" WITH MARK {mark}" if mark else ""
        return f"BEGIN TRANSACTION{this}{mark}"

    def commit_sql(self, expression: exp.Commit) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        durability = expression.args.get("durability")
        durability = (
            f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
            if durability is not None
            else ""
        )
        return f"COMMIT TRANSACTION{this}{durability}"

    def rollback_sql(self, expression: exp.Rollback) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"ROLLBACK TRANSACTION{this}"

    def identifier_sql(self, expression: exp.Identifier) -> str:
        identifier = super().identifier_sql(expression)

        # Restore the temp-table prefixes stripped at parse time.
        if expression.args.get("global_"):
            identifier = f"##{identifier}"
        elif expression.args.get("temporary"):
            identifier = f"#{identifier}"

        return identifier

    def constraint_sql(self, expression: exp.Constraint) -> str:
        this = self.sql(expression, "this")
        expressions = self.expressions(expression, flat=True, sep=" ")
        return f"CONSTRAINT {this} {expressions}"

    def length_sql(self, expression: exp.Length) -> str:
        return self._uncast_text(expression, "LEN")

    def right_sql(self, expression: exp.Right) -> str:
        return self._uncast_text(expression, "RIGHT")

    def left_sql(self, expression: exp.Left) -> str:
        return self._uncast_text(expression, "LEFT")

    def _uncast_text(self, expression: exp.Expression, name: str) -> str:
        """Generate `name(...)`, dropping a redundant CAST(... AS TEXT) on the
        first argument."""
        this = expression.this
        if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
            this_sql = self.sql(this, "this")
        else:
            this_sql = self.sql(this)
        expression_sql = self.sql(expression, "expression")
        return self.func(name, this_sql, expression_sql if expression_sql else None)

    def partition_sql(self, expression: exp.Partition) -> str:
        return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

    def alter_sql(self, expression: exp.Alter) -> str:
        # ALTER ... RENAME is expressed via the sp_rename stored procedure.
        action = seq_get(expression.args.get("actions") or [], 0)
        if isinstance(action, exp.AlterRename):
            return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
        return super().alter_sql(expression)

    def drop_sql(self, expression: exp.Drop) -> str:
        if expression.args["kind"] == "VIEW":
            # Views cannot be dropped with a catalog-qualified name.
            expression.this.set("catalog", None)
        return super().drop_sql(expression)

    def options_modifier(self, expression: exp.Expression) -> str:
        options = self.expressions(expression, key="options")
        return f" OPTION{self.wrap(options)}" if options else ""

    def dpipe_sql(self, expression: exp.DPipe) -> str:
        # T-SQL concatenates strings with '+', so fold the || chain into Adds.
        return self.sql(
            reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
        )

    def isascii_sql(self, expression: exp.IsAscii) -> str:
        # PATINDEX against a binary-collated non-ASCII character class: 0
        # matches means every character is ASCII.
        return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

    def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
        this = super().columndef_sql(expression, sep)
        default = self.sql(expression, "default")
        default = f" = {default}" if default else ""
        output = self.sql(expression, "output")
        output = f" {output}" if output else ""
        return f"{this}{default}{output}"

    def coalesce_sql(self, expression: exp.Coalesce) -> str:
        # ISNULL is preserved round-trip via the `is_null` flag.
        func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
        return rename_func(func_name)(self, expression)
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True: Always quote except for specials cases. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
- normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
1126 def select_sql(self, expression: exp.Select) -> str: 1127 limit = expression.args.get("limit") 1128 offset = expression.args.get("offset") 1129 1130 if isinstance(limit, exp.Fetch) and not offset: 1131 # Dialects like Oracle can FETCH directly from a row set but 1132 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1133 offset = exp.Offset(expression=exp.Literal.number(0)) 1134 expression.set("offset", offset) 1135 1136 if offset: 1137 if not expression.args.get("order"): 1138 # ORDER BY is required in order to use OFFSET in a query, so we use 1139 # a noop order by, since we don't really care about the order. 1140 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1141 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1142 1143 if isinstance(limit, exp.Limit): 1144 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1145 # we replace here because otherwise TOP would be generated in select_sql 1146 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1147 1148 return super().select_sql(expression)
1156 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1157 option = self.sql(expression, "this") 1158 value = self.sql(expression, "expression") 1159 if value: 1160 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1161 return f"{option} {optional_equal_sign}{value}" 1162 return option
1164 def lateral_op(self, expression: exp.Lateral) -> str: 1165 cross_apply = expression.args.get("cross_apply") 1166 if cross_apply is True: 1167 return "CROSS APPLY" 1168 if cross_apply is False: 1169 return "OUTER APPLY" 1170 1171 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1172 self.unsupported("LATERAL clause is not supported.") 1173 return "LATERAL"
1175 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1176 this = expression.this 1177 split_count = len(this.name.split(".")) 1178 delimiter = expression.args.get("delimiter") 1179 part_index = expression.args.get("part_index") 1180 1181 if ( 1182 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1183 or (delimiter and delimiter.name != ".") 1184 or not part_index 1185 or split_count > 4 1186 ): 1187 self.unsupported( 1188 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1189 ) 1190 return "" 1191 1192 return self.func( 1193 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1194 )
1202 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1203 nano = expression.args.get("nano") 1204 if nano is not None: 1205 nano.pop() 1206 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1207 1208 if expression.args.get("fractions") is None: 1209 expression.set("fractions", exp.Literal.number(0)) 1210 if expression.args.get("precision") is None: 1211 expression.set("precision", exp.Literal.number(0)) 1212 1213 return rename_func("TIMEFROMPARTS")(self, expression)
def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
    """Render DATETIMEFROMPARTS, stripping arguments T-SQL cannot express.

    DATETIMEFROMPARTS accepts neither a time zone nor nanoseconds; both are
    popped with a warning, and a missing millisecond argument defaults to 0.
    """
    for arg_name, message in (
        ("zone", "Time zone is not supported in DATETIMEFROMPARTS."),
        ("nano", "Specifying nanoseconds is not supported in DATETIMEFROMPARTS."),
    ):
        arg = expression.args.get(arg_name)
        if arg is not None:
            arg.pop()
            self.unsupported(message)

    if expression.args.get("milli") is None:
        expression.set("milli", exp.Literal.number(0))

    return rename_func("DATETIMEFROMPARTS")(self, expression)
def setitem_sql(self, expression: exp.SetItem) -> str:
    """Render a SET item; T-SQL omits '=' unless the target is a variable."""
    assignment = expression.this
    if not isinstance(assignment, exp.EQ) or isinstance(assignment.left, exp.Parameter):
        return super().setitem_sql(expression)

    # T-SQL does not use '=' in SET commands, except when the LHS is a variable.
    return f"{self.sql(assignment.left)} {self.sql(assignment.right)}"
def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    """Render the created object's name, adding a '#' prefix for temp tables."""
    name_sql = self.sql(expression, "this")
    props = expression.args.get("properties")
    prop_list = props.expressions if props else []

    is_temporary = any(isinstance(prop, exp.TemporaryProperty) for prop in prop_list)
    if is_temporary and not name_sql.startswith("#"):
        # Keep the '#' inside the brackets for quoted identifiers.
        if name_sql.startswith("["):
            name_sql = f"[#{name_sql[1:]}"
        else:
            name_sql = f"#{name_sql}"

    return name_sql
def create_sql(self, expression: exp.Create) -> str:
    """Generate T-SQL for a CREATE statement, emulating unsupported forms.

    Transformations applied:
    - CREATE VIEW: drops the catalog qualifier and relocates a leading WITH
      (CTE) clause into the view's query, where T-SQL requires it.
    - CREATE TABLE AS / LIKE: rewritten as SELECT * INTO ..., since T-SQL
      has no CTAS syntax (LIKE adds LIMIT 0 to copy only the schema).
    - IF NOT EXISTS on SCHEMA/TABLE/INDEX: emulated with an
      IF NOT EXISTS (...) EXEC('<statement>') catalog probe.
    - CREATE OR REPLACE: rewritten to CREATE OR ALTER.
    """
    kind = expression.kind
    exists = expression.args.get("exists")
    # Pop "exists" so the base generator doesn't render IF NOT EXISTS itself;
    # it is emulated below instead.
    expression.set("exists", None)

    like_property = expression.find(exp.LikeProperty)
    if like_property:
        # CREATE TABLE ... LIKE src: treat the source table as the CTAS query.
        ctas_expression = like_property.this
    else:
        ctas_expression = expression.expression

    if kind == "VIEW":
        expression.this.set("catalog", None)
        with_ = expression.args.get("with_")
        if ctas_expression and with_:
            # We've already preprocessed the Create expression to bubble up any nested CTEs,
            # but CREATE VIEW actually requires the WITH clause to come after it so we need
            # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
            ctas_expression.set("with_", with_.pop())

    table = expression.find(exp.Table)

    # Convert CTAS statement to SELECT .. INTO ..
    if kind == "TABLE" and ctas_expression:
        if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
            ctas_expression = ctas_expression.subquery()

        properties = expression.args.get("properties") or exp.Properties()
        is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

        select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
        select_into.set("into", exp.Into(this=table, temporary=is_temp))

        if like_property:
            # LIKE only copies the schema, so select zero rows.
            select_into.limit(0, copy=False)

        sql = self.sql(select_into)
    else:
        sql = super().create_sql(expression)

    if exists:
        # Emulate IF NOT EXISTS by probing the system catalogs and running the
        # CREATE statement via EXEC only when the object is absent.
        identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
        sql_with_ctes = self.prepend_ctes(expression, sql)
        sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
        if kind == "SCHEMA":
            return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
        elif kind == "TABLE":
            assert table
            where = exp.and_(
                exp.column("TABLE_NAME").eq(table.name),
                exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
            )
            return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
        elif kind == "INDEX":
            index = self.sql(exp.Literal.string(expression.this.text("this")))
            return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
    elif expression.args.get("replace"):
        sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

    return self.prepend_ctes(expression, sql)
@generator.unsupported_args("unlogged", "expressions")
def into_sql(self, expression: exp.Into) -> str:
    """Render INTO, pushing temporariness down onto the table identifier."""
    if expression.args.get("temporary"):
        # Mark the Identifier as temporary so it renders with a '#' prefix.
        target = expression.find(exp.Table)
        if target and isinstance(target.this, exp.Identifier):
            target.this.set("temporary", True)

    return f"{self.seg('INTO')} {self.sql(expression, 'this')}"
def version_sql(self, expression: exp.Version) -> str:
    """Render a temporal FOR SYSTEM_TIME clause."""
    name = expression.name
    if name == "TIMESTAMP":
        name = "SYSTEM_TIME"

    kind = expression.text("kind")
    expr = expression.expression

    if kind in ("FROM", "BETWEEN"):
        # FROM x TO y / BETWEEN x AND y each carry two boundary expressions.
        bounds = expr.expressions
        joiner = "TO" if kind == "FROM" else "AND"
        expr_sql = f"{self.sql(seq_get(bounds, 0))} {joiner} {self.sql(seq_get(bounds, 1))}"
    else:
        expr_sql = self.sql(expr)

    suffix = f" {expr_sql}" if expr_sql else ""
    return f"FOR {name} {kind}{suffix}"
def commit_sql(self, expression: exp.Commit) -> str:
    """Render COMMIT TRANSACTION with an optional DELAYED_DURABILITY hint."""
    name = self.sql(expression, "this")
    suffix = f" {name}" if name else ""

    durability = expression.args.get("durability")
    if durability is None:
        hint = ""
    else:
        hint = f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"

    return f"COMMIT TRANSACTION{suffix}{hint}"
def identifier_sql(self, expression: exp.Identifier) -> str:
    """Render an identifier, prefixing '##' for global temps and '#' for local ones."""
    rendered = super().identifier_sql(expression)

    if expression.args.get("global_"):
        return f"##{rendered}"
    if expression.args.get("temporary"):
        return f"#{rendered}"

    return rendered
def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
    """Append T-SQL specific '= default' and OUTPUT fragments to a column definition."""
    rendered = super().columndef_sql(expression, sep)

    default = self.sql(expression, "default")
    if default:
        rendered = f"{rendered} = {default}"

    output = self.sql(expression, "output")
    if output:
        rendered = f"{rendered} {output}"

    return rendered
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- SUPPORTS_WINDOW_EXCLUDE
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- UNICODE_SUBSTITUTE
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- SUPPORTS_MEDIAN
- SUPPORTS_UNIX_SECONDS
- NORMALIZE_EXTRACT_DATE_PARTS
- ARRAY_SIZE_NAME
- ARRAY_SIZE_DIM_REQUIRED
- SUPPORTS_BETWEEN_FLAGS
- SUPPORTS_LIKE_QUANTIFIERS
- MATCH_AGAINST_TABLE_PREFIX
- SET_ASSIGNMENT_REQUIRES_VARIABLE_KEYWORD
- UPDATE_STATEMENT_SUPPORTS_FROM
- UNSUPPORTED_TYPES
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS
- SAFE_JSON_PATH_KEY_RE
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- sanitize_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- pseudocolumn_sql
- columnposition_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- inoutcolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- limitoptions_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablefromrows_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- from_sql
- groupingsets_sql
- rollup_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- queryband_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- booland_sql
- boolor_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- for_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- formatphrase_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- strtotime_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterindex_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- altersession_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- addpartition_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- safedivide_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- like_sql
- ilike_sql
- match_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- sub_sql
- trycast_sql
- jsoncast_sql
- try_sql
- log_sql
- use_sql
- binary
- ceil_floor
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- whens_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- uniquekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- generateembedding_sql
- mltranslate_sql
- mlforecast_sql
- featuresattime_sql
- vectorsearch_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodatetime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- arrayconcat_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonextractquote_sql
- jsonexists_sql
- arrayagg_sql
- slice_sql
- apply_sql
- grant_sql
- revoke_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql
- string_sql
- median_sql
- overflowtruncatebehavior_sql
- unixseconds_sql
- arraysize_sql
- attach_sql
- detach_sql
- attachoption_sql
- watermarkcolumnconstraint_sql
- encodeproperty_sql
- includeproperty_sql
- xmlelement_sql
- xmlkeyvalueoption_sql
- partitionbyrangeproperty_sql
- partitionbyrangepropertydynamic_sql
- unpivotcolumns_sql
- analyzesample_sql
- analyzestatistics_sql
- analyzehistogram_sql
- analyzedelete_sql
- analyzelistchainedrows_sql
- analyzevalidate_sql
- analyze_sql
- xmltable_sql
- xmlnamespace_sql
- export_sql
- declare_sql
- declareitem_sql
- recursivewithsearch_sql
- parameterizedagg_sql
- anonymousaggfunc_sql
- combinedaggfunc_sql
- combinedparameterizedagg_sql
- show_sql
- install_sql
- get_put_sql
- translatecharacters_sql
- decodecase_sql
- semanticview_sql
- getextract_sql
- datefromunixdate_sql
- space_sql
- buildproperty_sql
- refreshtriggerproperty_sql
- modelattribute_sql
- directorystage_sql
- uuid_sql
- initcap_sql
- localtime_sql
- localtimestamp_sql
- weekstart_sql
- chr_sql