sqlglot.dialects.tsql
1from __future__ import annotations 2 3import datetime 4import re 5import typing as t 6from functools import reduce 7 8from sqlglot import exp, generator, parser, tokens, transforms 9from sqlglot.dialects.dialect import ( 10 Dialect, 11 NormalizationStrategy, 12 any_value_to_max_sql, 13 build_date_delta, 14 date_delta_sql, 15 datestrtodate_sql, 16 generatedasidentitycolumnconstraint_sql, 17 max_or_greatest, 18 min_or_least, 19 rename_func, 20 strposition_sql, 21 timestrtotime_sql, 22 trim_sql, 23 map_date_part, 24) 25from sqlglot.helper import seq_get 26from sqlglot.parser import build_coalesce 27from sqlglot.time import format_time 28from sqlglot.tokens import TokenType 29from sqlglot.typing.tsql import EXPRESSION_METADATA 30 31if t.TYPE_CHECKING: 32 from sqlglot._typing import E 33 34FULL_FORMAT_TIME_MAPPING = { 35 "weekday": "%A", 36 "dw": "%A", 37 "w": "%A", 38 "month": "%B", 39 "mm": "%B", 40 "m": "%B", 41} 42 43DATE_DELTA_INTERVAL = { 44 "year": "year", 45 "yyyy": "year", 46 "yy": "year", 47 "quarter": "quarter", 48 "qq": "quarter", 49 "q": "quarter", 50 "month": "month", 51 "mm": "month", 52 "m": "month", 53 "week": "week", 54 "ww": "week", 55 "wk": "week", 56 "day": "day", 57 "dd": "day", 58 "d": "day", 59} 60 61DATE_PART_UNMAPPING = { 62 "WEEKISO": "ISO_WEEK", 63 "DAYOFWEEK": "WEEKDAY", 64 "TIMEZONE_MINUTE": "TZOFFSET", 65} 66 67DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})") 68 69# N = Numeric, C=Currency 70TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"} 71 72DEFAULT_START_DATE = datetime.date(1900, 1, 1) 73 74BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias} 75 76# Unsupported options: 77# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] ) 78# - TABLE HINT 79OPTIONS: parser.OPTIONS_TYPE = { 80 **dict.fromkeys( 81 ( 82 "DISABLE_OPTIMIZED_PLAN_FORCING", 83 "FAST", 84 "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX", 85 "LABEL", 86 "MAXDOP", 87 "MAXRECURSION", 88 "MAX_GRANT_PERCENT", 89 
"MIN_GRANT_PERCENT", 90 "NO_PERFORMANCE_SPOOL", 91 "QUERYTRACEON", 92 "RECOMPILE", 93 ), 94 tuple(), 95 ), 96 "CONCAT": ("UNION",), 97 "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"), 98 "EXPAND": ("VIEWS",), 99 "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"), 100 "HASH": ("GROUP", "JOIN", "UNION"), 101 "KEEP": ("PLAN",), 102 "KEEPFIXED": ("PLAN",), 103 "LOOP": ("JOIN",), 104 "MERGE": ("JOIN", "UNION"), 105 "OPTIMIZE": (("FOR", "UNKNOWN"),), 106 "ORDER": ("GROUP",), 107 "PARAMETERIZATION": ("FORCED", "SIMPLE"), 108 "ROBUST": ("PLAN",), 109 "USE": ("PLAN",), 110} 111 112 113XML_OPTIONS: parser.OPTIONS_TYPE = { 114 **dict.fromkeys( 115 ( 116 "AUTO", 117 "EXPLICIT", 118 "TYPE", 119 ), 120 tuple(), 121 ), 122 "ELEMENTS": ( 123 "XSINIL", 124 "ABSENT", 125 ), 126 "BINARY": ("BASE64",), 127} 128 129 130OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL") 131 132 133def _build_formatted_time( 134 exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None 135) -> t.Callable[[t.List], E]: 136 def _builder(args: t.List) -> E: 137 fmt = seq_get(args, 0) 138 if isinstance(fmt, exp.Expression): 139 fmt = exp.Literal.string( 140 format_time( 141 fmt.name.lower(), 142 ( 143 {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING} 144 if full_format_mapping 145 else TSQL.TIME_MAPPING 146 ), 147 ) 148 ) 149 150 this = seq_get(args, 1) 151 if isinstance(this, exp.Expression): 152 this = exp.cast(this, exp.DataType.Type.DATETIME2) 153 154 return exp_class(this=this, format=fmt) 155 156 return _builder 157 158 159def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr: 160 this = seq_get(args, 0) 161 fmt = seq_get(args, 1) 162 culture = seq_get(args, 2) 163 164 number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name)) 165 166 if number_fmt: 167 return exp.NumberToStr(this=this, format=fmt, culture=culture) 168 169 if fmt: 170 fmt = exp.Literal.string( 171 format_time(fmt.name, 
TSQL.FORMAT_TIME_MAPPING) 172 if len(fmt.name) == 1 173 else format_time(fmt.name, TSQL.TIME_MAPPING) 174 ) 175 176 return exp.TimeToStr(this=this, format=fmt, culture=culture) 177 178 179def _build_eomonth(args: t.List) -> exp.LastDay: 180 date = exp.TsOrDsToDate(this=seq_get(args, 0)) 181 month_lag = seq_get(args, 1) 182 183 if month_lag is None: 184 this: exp.Expression = date 185 else: 186 unit = DATE_DELTA_INTERVAL.get("month") 187 this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit)) 188 189 return exp.LastDay(this=this) 190 191 192def _build_hashbytes(args: t.List) -> exp.Expression: 193 kind, data = args 194 kind = kind.name.upper() if kind.is_string else "" 195 196 if kind == "MD5": 197 args.pop(0) 198 return exp.MD5(this=data) 199 if kind in ("SHA", "SHA1"): 200 args.pop(0) 201 return exp.SHA(this=data) 202 if kind == "SHA2_256": 203 return exp.SHA2(this=data, length=exp.Literal.number(256)) 204 if kind == "SHA2_512": 205 return exp.SHA2(this=data, length=exp.Literal.number(512)) 206 207 return exp.func("HASHBYTES", *args) 208 209 210def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str: 211 fmt = expression.args["format"] 212 213 if not isinstance(expression, exp.NumberToStr): 214 if fmt.is_string: 215 mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING) 216 fmt_sql = self.sql(exp.Literal.string(mapped_fmt)) 217 else: 218 fmt_sql = self.format_time(expression) or self.sql(fmt) 219 else: 220 fmt_sql = self.sql(fmt) 221 222 return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture")) 223 224 225def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str: 226 this = expression.this 227 distinct = expression.find(exp.Distinct) 228 if distinct: 229 # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression 230 self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.") 231 this = distinct.pop().expressions[0] 232 233 order = 
"" 234 if isinstance(expression.this, exp.Order): 235 if expression.this.this: 236 this = expression.this.this.pop() 237 # Order has a leading space 238 order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})" 239 240 separator = expression.args.get("separator") or exp.Literal.string(",") 241 return f"STRING_AGG({self.format_args(this, separator)}){order}" 242 243 244def _build_date_delta( 245 exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None, big_int: bool = False 246) -> t.Callable[[t.List], E]: 247 def _builder(args: t.List) -> E: 248 unit = seq_get(args, 0) 249 if unit and unit_mapping: 250 unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name)) 251 252 start_date = seq_get(args, 1) 253 if start_date and start_date.is_number: 254 # Numeric types are valid DATETIME values 255 if start_date.is_int: 256 adds = DEFAULT_START_DATE + datetime.timedelta(days=start_date.to_py()) 257 start_date = exp.Literal.string(adds.strftime("%F")) 258 else: 259 # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs. 260 # This is not a problem when generating T-SQL code, it is when transpiling to other dialects. 
261 return exp_class( 262 this=seq_get(args, 2), expression=start_date, unit=unit, big_int=big_int 263 ) 264 265 return exp_class( 266 this=exp.TimeStrToTime(this=seq_get(args, 2)), 267 expression=exp.TimeStrToTime(this=start_date), 268 unit=unit, 269 big_int=big_int, 270 ) 271 272 return _builder 273 274 275def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression: 276 """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.""" 277 alias = expression.args.get("alias") 278 279 if ( 280 isinstance(expression, (exp.CTE, exp.Subquery)) 281 and isinstance(alias, exp.TableAlias) 282 and not alias.columns 283 ): 284 from sqlglot.optimizer.qualify_columns import qualify_outputs 285 286 # We keep track of the unaliased column projection indexes instead of the expressions 287 # themselves, because the latter are going to be replaced by new nodes when the aliases 288 # are added and hence we won't be able to reach these newly added Alias parents 289 query = expression.this 290 unaliased_column_indexes = ( 291 i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias 292 ) 293 294 qualify_outputs(query) 295 296 # Preserve the quoting information of columns for newly added Alias nodes 297 query_selects = query.selects 298 for select_index in unaliased_column_indexes: 299 alias = query_selects[select_index] 300 column = alias.this 301 if isinstance(column.this, exp.Identifier): 302 alias.args["alias"].set("quoted", column.this.quoted) 303 304 return expression 305 306 307# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax 308def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts: 309 return exp.TimestampFromParts( 310 year=seq_get(args, 0), 311 month=seq_get(args, 1), 312 day=seq_get(args, 2), 313 hour=seq_get(args, 3), 314 min=seq_get(args, 4), 315 sec=seq_get(args, 5), 316 milli=seq_get(args, 6), 317 ) 318 319 320# 
https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax 321def _build_timefromparts(args: t.List) -> exp.TimeFromParts: 322 return exp.TimeFromParts( 323 hour=seq_get(args, 0), 324 min=seq_get(args, 1), 325 sec=seq_get(args, 2), 326 fractions=seq_get(args, 3), 327 precision=seq_get(args, 4), 328 ) 329 330 331def _build_with_arg_as_text( 332 klass: t.Type[exp.Expression], 333) -> t.Callable[[t.List[exp.Expression]], exp.Expression]: 334 def _parse(args: t.List[exp.Expression]) -> exp.Expression: 335 this = seq_get(args, 0) 336 337 if this and not this.is_string: 338 this = exp.cast(this, exp.DataType.Type.TEXT) 339 340 expression = seq_get(args, 1) 341 kwargs = {"this": this} 342 343 if expression: 344 kwargs["expression"] = expression 345 346 return klass(**kwargs) 347 348 return _parse 349 350 351# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16 352def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous: 353 # PARSENAME(...) will be stored into exp.SplitPart if: 354 # - All args are literals 355 # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL) 356 if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args): 357 this = args[0] 358 part_index = args[1] 359 split_count = len(this.name.split(".")) 360 if split_count <= 4: 361 return exp.SplitPart( 362 this=this, 363 delimiter=exp.Literal.string("."), 364 part_index=exp.Literal.number(split_count + 1 - part_index.to_py()), 365 ) 366 367 return exp.Anonymous(this="PARSENAME", expressions=args) 368 369 370def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract: 371 if len(args) == 1: 372 # The default value for path is '$'. As a result, if you don't provide a 373 # value for path, JSON_QUERY returns the input expression. 
374 args.append(exp.Literal.string("$")) 375 376 return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect) 377 378 379def _json_extract_sql( 380 self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar 381) -> str: 382 json_query = self.func("JSON_QUERY", expression.this, expression.expression) 383 json_value = self.func("JSON_VALUE", expression.this, expression.expression) 384 return self.func("ISNULL", json_query, json_value) 385 386 387def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime): 388 sql = timestrtotime_sql(self, expression) 389 if expression.args.get("zone"): 390 # If there is a timezone, produce an expression like: 391 # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC' 392 # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information 393 return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC"))) 394 return sql 395 396 397def _build_datetrunc(args: t.List) -> exp.TimestampTrunc: 398 unit = seq_get(args, 0) 399 this = seq_get(args, 1) 400 401 if this and this.is_string: 402 this = exp.cast(this, exp.DataType.Type.DATETIME2) 403 404 return exp.TimestampTrunc(this=this, unit=unit) 405 406 407class TSQL(Dialect): 408 SUPPORTS_SEMI_ANTI_JOIN = False 409 LOG_BASE_FIRST = False 410 TYPED_DIVISION = True 411 CONCAT_COALESCE = True 412 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 413 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 414 415 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 416 417 EXPRESSION_METADATA = EXPRESSION_METADATA.copy() 418 419 DATE_PART_MAPPING = { 420 **Dialect.DATE_PART_MAPPING, 421 "QQ": "QUARTER", 422 "M": "MONTH", 423 "Y": "DAYOFYEAR", 424 "WW": "WEEK", 425 "N": "MINUTE", 426 "SS": "SECOND", 427 "MCS": "MICROSECOND", 428 "TZOFFSET": "TIMEZONE_MINUTE", 429 "TZ": "TIMEZONE_MINUTE", 430 "ISO_WEEK": "WEEKISO", 431 "ISOWK": "WEEKISO", 432 "ISOWW": "WEEKISO", 433 } 
434 435 TIME_MAPPING = { 436 "year": "%Y", 437 "dayofyear": "%j", 438 "day": "%d", 439 "dy": "%d", 440 "y": "%Y", 441 "week": "%W", 442 "ww": "%W", 443 "wk": "%W", 444 "isowk": "%V", 445 "isoww": "%V", 446 "iso_week": "%V", 447 "hour": "%h", 448 "hh": "%I", 449 "minute": "%M", 450 "mi": "%M", 451 "n": "%M", 452 "second": "%S", 453 "ss": "%S", 454 "s": "%-S", 455 "millisecond": "%f", 456 "ms": "%f", 457 "weekday": "%w", 458 "dw": "%w", 459 "month": "%m", 460 "mm": "%M", 461 "m": "%-M", 462 "Y": "%Y", 463 "YYYY": "%Y", 464 "YY": "%y", 465 "MMMM": "%B", 466 "MMM": "%b", 467 "MM": "%m", 468 "M": "%-m", 469 "dddd": "%A", 470 "dd": "%d", 471 "d": "%-d", 472 "HH": "%H", 473 "H": "%-H", 474 "h": "%-I", 475 "ffffff": "%f", 476 "yyyy": "%Y", 477 "yy": "%y", 478 } 479 480 CONVERT_FORMAT_MAPPING = { 481 "0": "%b %d %Y %-I:%M%p", 482 "1": "%m/%d/%y", 483 "2": "%y.%m.%d", 484 "3": "%d/%m/%y", 485 "4": "%d.%m.%y", 486 "5": "%d-%m-%y", 487 "6": "%d %b %y", 488 "7": "%b %d, %y", 489 "8": "%H:%M:%S", 490 "9": "%b %d %Y %-I:%M:%S:%f%p", 491 "10": "mm-dd-yy", 492 "11": "yy/mm/dd", 493 "12": "yymmdd", 494 "13": "%d %b %Y %H:%M:ss:%f", 495 "14": "%H:%M:%S:%f", 496 "20": "%Y-%m-%d %H:%M:%S", 497 "21": "%Y-%m-%d %H:%M:%S.%f", 498 "22": "%m/%d/%y %-I:%M:%S %p", 499 "23": "%Y-%m-%d", 500 "24": "%H:%M:%S", 501 "25": "%Y-%m-%d %H:%M:%S.%f", 502 "100": "%b %d %Y %-I:%M%p", 503 "101": "%m/%d/%Y", 504 "102": "%Y.%m.%d", 505 "103": "%d/%m/%Y", 506 "104": "%d.%m.%Y", 507 "105": "%d-%m-%Y", 508 "106": "%d %b %Y", 509 "107": "%b %d, %Y", 510 "108": "%H:%M:%S", 511 "109": "%b %d %Y %-I:%M:%S:%f%p", 512 "110": "%m-%d-%Y", 513 "111": "%Y/%m/%d", 514 "112": "%Y%m%d", 515 "113": "%d %b %Y %H:%M:%S:%f", 516 "114": "%H:%M:%S:%f", 517 "120": "%Y-%m-%d %H:%M:%S", 518 "121": "%Y-%m-%d %H:%M:%S.%f", 519 "126": "%Y-%m-%dT%H:%M:%S.%f", 520 } 521 522 FORMAT_TIME_MAPPING = { 523 "y": "%B %Y", 524 "d": "%m/%d/%Y", 525 "H": "%-H", 526 "h": "%-I", 527 "s": "%Y-%m-%d %H:%M:%S", 528 "D": "%A,%B,%Y", 529 "f": "%A,%B,%Y 
%-I:%M %p", 530 "F": "%A,%B,%Y %-I:%M:%S %p", 531 "g": "%m/%d/%Y %-I:%M %p", 532 "G": "%m/%d/%Y %-I:%M:%S %p", 533 "M": "%B %-d", 534 "m": "%B %-d", 535 "O": "%Y-%m-%dT%H:%M:%S", 536 "u": "%Y-%M-%D %H:%M:%S%z", 537 "U": "%A, %B %D, %Y %H:%M:%S%z", 538 "T": "%-I:%M:%S %p", 539 "t": "%-I:%M", 540 "Y": "%a %Y", 541 } 542 543 class Tokenizer(tokens.Tokenizer): 544 IDENTIFIERS = [("[", "]"), '"'] 545 QUOTES = ["'", '"'] 546 HEX_STRINGS = [("0x", ""), ("0X", "")] 547 VAR_SINGLE_TOKENS = {"@", "$", "#"} 548 549 KEYWORDS = { 550 **tokens.Tokenizer.KEYWORDS, 551 "CLUSTERED INDEX": TokenType.INDEX, 552 "DATETIME2": TokenType.DATETIME2, 553 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 554 "DECLARE": TokenType.DECLARE, 555 "EXEC": TokenType.COMMAND, 556 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 557 "GO": TokenType.COMMAND, 558 "IMAGE": TokenType.IMAGE, 559 "MONEY": TokenType.MONEY, 560 "NONCLUSTERED INDEX": TokenType.INDEX, 561 "NTEXT": TokenType.TEXT, 562 "OPTION": TokenType.OPTION, 563 "OUTPUT": TokenType.RETURNING, 564 "PRINT": TokenType.COMMAND, 565 "PROC": TokenType.PROCEDURE, 566 "REAL": TokenType.FLOAT, 567 "ROWVERSION": TokenType.ROWVERSION, 568 "SMALLDATETIME": TokenType.SMALLDATETIME, 569 "SMALLMONEY": TokenType.SMALLMONEY, 570 "SQL_VARIANT": TokenType.VARIANT, 571 "SYSTEM_USER": TokenType.CURRENT_USER, 572 "TOP": TokenType.TOP, 573 "TIMESTAMP": TokenType.ROWVERSION, 574 "TINYINT": TokenType.UTINYINT, 575 "UNIQUEIDENTIFIER": TokenType.UUID, 576 "UPDATE STATISTICS": TokenType.COMMAND, 577 "XML": TokenType.XML, 578 } 579 KEYWORDS.pop("/*+") 580 581 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 582 583 class Parser(parser.Parser): 584 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 585 LOG_DEFAULTS_TO_LN = True 586 STRING_ALIASES = True 587 NO_PAREN_IF_COMMANDS = False 588 589 QUERY_MODIFIER_PARSERS = { 590 **parser.Parser.QUERY_MODIFIER_PARSERS, 591 TokenType.OPTION: lambda self: ("options", self._parse_options()), 592 TokenType.FOR: lambda self: ("for_", 
self._parse_for()), 593 } 594 595 # T-SQL does not allow BEGIN to be used as an identifier 596 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 597 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 598 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 599 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 600 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 601 602 FUNCTIONS = { 603 **parser.Parser.FUNCTIONS, 604 "ATN2": exp.Atan2.from_arg_list, 605 "CHARINDEX": lambda args: exp.StrPosition( 606 this=seq_get(args, 1), 607 substr=seq_get(args, 0), 608 position=seq_get(args, 2), 609 ), 610 "COUNT": lambda args: exp.Count( 611 this=seq_get(args, 0), expressions=args[1:], big_int=False 612 ), 613 "COUNT_BIG": lambda args: exp.Count( 614 this=seq_get(args, 0), expressions=args[1:], big_int=True 615 ), 616 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 617 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 618 "DATEDIFF_BIG": _build_date_delta( 619 exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL, big_int=True 620 ), 621 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 622 "DATETIMEFROMPARTS": _build_datetimefromparts, 623 "EOMONTH": _build_eomonth, 624 "FORMAT": _build_format, 625 "GETDATE": exp.CurrentTimestamp.from_arg_list, 626 "HASHBYTES": _build_hashbytes, 627 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 628 "JSON_QUERY": _build_json_query, 629 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 630 "LEN": _build_with_arg_as_text(exp.Length), 631 "LEFT": _build_with_arg_as_text(exp.Left), 632 "NEWID": exp.Uuid.from_arg_list, 633 "RIGHT": _build_with_arg_as_text(exp.Right), 634 "PARSENAME": _build_parsename, 635 "REPLICATE": exp.Repeat.from_arg_list, 636 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 637 "SQUARE": lambda args: 
exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 638 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 639 "SUSER_NAME": exp.CurrentUser.from_arg_list, 640 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 641 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 642 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 643 "TIMEFROMPARTS": _build_timefromparts, 644 "DATETRUNC": _build_datetrunc, 645 } 646 647 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 648 649 PROCEDURE_OPTIONS = dict.fromkeys( 650 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 651 ) 652 653 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 654 655 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 656 TokenType.TABLE, 657 *parser.Parser.TYPE_TOKENS, 658 } 659 660 STATEMENT_PARSERS = { 661 **parser.Parser.STATEMENT_PARSERS, 662 TokenType.DECLARE: lambda self: self._parse_declare(), 663 } 664 665 RANGE_PARSERS = { 666 **parser.Parser.RANGE_PARSERS, 667 TokenType.DCOLON: lambda self, this: self.expression( 668 exp.ScopeResolution, 669 this=this, 670 expression=self._parse_function() or self._parse_var(any_token=True), 671 ), 672 } 673 674 NO_PAREN_FUNCTION_PARSERS = { 675 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 676 "NEXT": lambda self: self._parse_next_value_for(), 677 } 678 679 FUNCTION_PARSERS: t.Dict[str, t.Callable] = { 680 **parser.Parser.FUNCTION_PARSERS, 681 "JSON_ARRAYAGG": lambda self: self.expression( 682 exp.JSONArrayAgg, 683 this=self._parse_bitwise(), 684 order=self._parse_order(), 685 null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"), 686 ), 687 "DATEPART": lambda self: self._parse_datepart(), 688 } 689 690 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 691 COLUMN_OPERATORS = { 692 **parser.Parser.COLUMN_OPERATORS, 693 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 694 if isinstance(to, exp.DataType) and to.this != 
exp.DataType.Type.USERDEFINED 695 else self.expression(exp.ScopeResolution, this=this, expression=to), 696 } 697 698 SET_OP_MODIFIERS = {"offset"} 699 700 ODBC_DATETIME_LITERALS = { 701 "d": exp.Date, 702 "t": exp.Time, 703 "ts": exp.Timestamp, 704 } 705 706 def _parse_datepart(self) -> exp.Extract: 707 this = self._parse_var() 708 expression = self._match(TokenType.COMMA) and self._parse_bitwise() 709 name = map_date_part(this, self.dialect) 710 711 return self.expression(exp.Extract, this=name, expression=expression) 712 713 def _parse_alter_table_set(self) -> exp.AlterSet: 714 return self._parse_wrapped(super()._parse_alter_table_set) 715 716 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 717 if self._match(TokenType.MERGE): 718 comments = self._prev_comments 719 merge = self._parse_merge() 720 merge.add_comments(comments, prepend=True) 721 return merge 722 723 return super()._parse_wrapped_select(table=table) 724 725 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 726 # We want to use _parse_types() if the first token after :: is a known type, 727 # otherwise we could parse something like x::varchar(max) into a function 728 if self._match_set(self.TYPE_TOKENS, advance=False): 729 return self._parse_types() 730 731 return self._parse_function() or self._parse_types() 732 733 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 734 if not self._match(TokenType.OPTION): 735 return None 736 737 def _parse_option() -> t.Optional[exp.Expression]: 738 option = self._parse_var_from_options(OPTIONS) 739 if not option: 740 return None 741 742 self._match(TokenType.EQ) 743 return self.expression( 744 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 745 ) 746 747 return self._parse_wrapped_csv(_parse_option) 748 749 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 750 this = self._parse_primary_or_var() 751 if self._match(TokenType.L_PAREN, advance=False): 752 expression = 
self._parse_wrapped(self._parse_string) 753 else: 754 expression = None 755 756 return exp.XMLKeyValueOption(this=this, expression=expression) 757 758 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 759 if not self._match_pair(TokenType.FOR, TokenType.XML): 760 return None 761 762 def _parse_for_xml() -> t.Optional[exp.Expression]: 763 return self.expression( 764 exp.QueryOption, 765 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 766 or self._parse_xml_key_value_option(), 767 ) 768 769 return self._parse_csv(_parse_for_xml) 770 771 def _parse_projections(self) -> t.List[exp.Expression]: 772 """ 773 T-SQL supports the syntax alias = expression in the SELECT's projection list, 774 so we transform all parsed Selects to convert their EQ projections into Aliases. 775 776 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 777 """ 778 return [ 779 ( 780 exp.alias_(projection.expression, projection.this.this, copy=False) 781 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 782 else projection 783 ) 784 for projection in super()._parse_projections() 785 ] 786 787 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 788 """Applies to SQL Server and Azure SQL Database 789 COMMIT [ { TRAN | TRANSACTION } 790 [ transaction_name | @tran_name_variable ] ] 791 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 792 793 ROLLBACK { TRAN | TRANSACTION } 794 [ transaction_name | @tran_name_variable 795 | savepoint_name | @savepoint_variable ] 796 """ 797 rollback = self._prev.token_type == TokenType.ROLLBACK 798 799 self._match_texts(("TRAN", "TRANSACTION")) 800 this = self._parse_id_var() 801 802 if rollback: 803 return self.expression(exp.Rollback, this=this) 804 805 durability = None 806 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 807 self._match_text_seq("DELAYED_DURABILITY") 808 self._match(TokenType.EQ) 809 810 if 
self._match_text_seq("OFF"): 811 durability = False 812 else: 813 self._match(TokenType.ON) 814 durability = True 815 816 self._match_r_paren() 817 818 return self.expression(exp.Commit, this=this, durability=durability) 819 820 def _parse_transaction(self) -> exp.Transaction | exp.Command: 821 """Applies to SQL Server and Azure SQL Database 822 BEGIN { TRAN | TRANSACTION } 823 [ { transaction_name | @tran_name_variable } 824 [ WITH MARK [ 'description' ] ] 825 ] 826 """ 827 if self._match_texts(("TRAN", "TRANSACTION")): 828 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 829 if self._match_text_seq("WITH", "MARK"): 830 transaction.set("mark", self._parse_string()) 831 832 return transaction 833 834 return self._parse_as_command(self._prev) 835 836 def _parse_returns(self) -> exp.ReturnsProperty: 837 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 838 returns = super()._parse_returns() 839 returns.set("table", table) 840 return returns 841 842 def _parse_convert( 843 self, strict: bool, safe: t.Optional[bool] = None 844 ) -> t.Optional[exp.Expression]: 845 this = self._parse_types() 846 self._match(TokenType.COMMA) 847 args = [this, *self._parse_csv(self._parse_assignment)] 848 convert = exp.Convert.from_arg_list(args) 849 convert.set("safe", safe) 850 return convert 851 852 def _parse_column_def( 853 self, this: t.Optional[exp.Expression], computed_column: bool = True 854 ) -> t.Optional[exp.Expression]: 855 this = super()._parse_column_def(this=this, computed_column=computed_column) 856 if not this: 857 return None 858 if self._match(TokenType.EQ): 859 this.set("default", self._parse_disjunction()) 860 if self._match_texts(self.COLUMN_DEFINITION_MODES): 861 this.set("output", self._prev.text) 862 return this 863 864 def _parse_user_defined_function( 865 self, kind: t.Optional[TokenType] = None 866 ) -> t.Optional[exp.Expression]: 867 this = super()._parse_user_defined_function(kind=kind) 868 869 if ( 870 
kind == TokenType.FUNCTION 871 or isinstance(this, exp.UserDefinedFunction) 872 or self._match(TokenType.ALIAS, advance=False) 873 ): 874 return this 875 876 if not self._match(TokenType.WITH, advance=False): 877 expressions = self._parse_csv(self._parse_function_parameter) 878 else: 879 expressions = None 880 881 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 882 883 def _parse_into(self) -> t.Optional[exp.Into]: 884 into = super()._parse_into() 885 886 table = isinstance(into, exp.Into) and into.find(exp.Table) 887 if isinstance(table, exp.Table): 888 table_identifier = table.this 889 if table_identifier.args.get("temporary"): 890 # Promote the temporary property from the Identifier to the Into expression 891 t.cast(exp.Into, into).set("temporary", True) 892 893 return into 894 895 def _parse_id_var( 896 self, 897 any_token: bool = True, 898 tokens: t.Optional[t.Collection[TokenType]] = None, 899 ) -> t.Optional[exp.Expression]: 900 is_temporary = self._match(TokenType.HASH) 901 is_global = is_temporary and self._match(TokenType.HASH) 902 903 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 904 if this: 905 if is_global: 906 this.set("global_", True) 907 elif is_temporary: 908 this.set("temporary", True) 909 910 return this 911 912 def _parse_create(self) -> exp.Create | exp.Command: 913 create = super()._parse_create() 914 915 if isinstance(create, exp.Create): 916 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 917 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 918 if not create.args.get("properties"): 919 create.set("properties", exp.Properties(expressions=[])) 920 921 create.args["properties"].append("expressions", exp.TemporaryProperty()) 922 923 return create 924 925 def _parse_if(self) -> t.Optional[exp.Expression]: 926 index = self._index 927 928 if self._match_text_seq("OBJECT_ID"): 929 self._parse_wrapped_csv(self._parse_string) 
930 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 931 return self._parse_drop(exists=True) 932 self._retreat(index) 933 934 return super()._parse_if() 935 936 def _parse_unique(self) -> exp.UniqueColumnConstraint: 937 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 938 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 939 else: 940 this = self._parse_schema(self._parse_id_var(any_token=False)) 941 942 return self.expression(exp.UniqueColumnConstraint, this=this) 943 944 def _parse_update(self) -> exp.Update: 945 expression = super()._parse_update() 946 expression.set("options", self._parse_options()) 947 return expression 948 949 def _parse_partition(self) -> t.Optional[exp.Partition]: 950 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 951 return None 952 953 def parse_range(): 954 low = self._parse_bitwise() 955 high = self._parse_bitwise() if self._match_text_seq("TO") else None 956 957 return ( 958 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 959 ) 960 961 partition = self.expression( 962 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 963 ) 964 965 self._match_r_paren() 966 967 return partition 968 969 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 970 var = self._parse_id_var() 971 if not var: 972 return None 973 974 self._match(TokenType.ALIAS) 975 return self.expression( 976 exp.DeclareItem, 977 this=var, 978 kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(), 979 default=self._match(TokenType.EQ) and self._parse_bitwise(), 980 ) 981 982 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 983 expression = super()._parse_alter_table_alter() 984 985 if expression is not None: 986 collation = expression.args.get("collate") 987 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 988 identifier = collation.this 989 collation.set("this", 
exp.Var(this=identifier.name)) 990 991 return expression 992 993 def _parse_primary_key_part(self) -> t.Optional[exp.Expression]: 994 return self._parse_ordered() 995 996 class Generator(generator.Generator): 997 LIMIT_IS_TOP = True 998 QUERY_HINTS = False 999 RETURNING_END = False 1000 NVL2_SUPPORTED = False 1001 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 1002 LIMIT_FETCH = "FETCH" 1003 COMPUTED_COLUMN_WITH_TYPE = False 1004 CTE_RECURSIVE_KEYWORD_REQUIRED = False 1005 ENSURE_BOOLS = True 1006 NULL_ORDERING_SUPPORTED = None 1007 SUPPORTS_SINGLE_ARG_CONCAT = False 1008 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 1009 SUPPORTS_SELECT_INTO = True 1010 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 1011 SUPPORTS_TO_NUMBER = False 1012 SET_OP_MODIFIERS = False 1013 COPY_PARAMS_EQ_REQUIRED = True 1014 PARSE_JSON_NAME = None 1015 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 1016 ALTER_SET_WRAPPED = True 1017 ALTER_SET_TYPE = "" 1018 1019 EXPRESSIONS_WITHOUT_NESTED_CTES = { 1020 exp.Create, 1021 exp.Delete, 1022 exp.Insert, 1023 exp.Intersect, 1024 exp.Except, 1025 exp.Merge, 1026 exp.Select, 1027 exp.Subquery, 1028 exp.Union, 1029 exp.Update, 1030 } 1031 1032 SUPPORTED_JSON_PATH_PARTS = { 1033 exp.JSONPathKey, 1034 exp.JSONPathRoot, 1035 exp.JSONPathSubscript, 1036 } 1037 1038 TYPE_MAPPING = { 1039 **generator.Generator.TYPE_MAPPING, 1040 exp.DataType.Type.BOOLEAN: "BIT", 1041 exp.DataType.Type.DATETIME2: "DATETIME2", 1042 exp.DataType.Type.DECIMAL: "NUMERIC", 1043 exp.DataType.Type.DOUBLE: "FLOAT", 1044 exp.DataType.Type.INT: "INTEGER", 1045 exp.DataType.Type.ROWVERSION: "ROWVERSION", 1046 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 1047 exp.DataType.Type.TIMESTAMP: "DATETIME2", 1048 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 1049 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 1050 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 1051 exp.DataType.Type.UTINYINT: "TINYINT", 1052 exp.DataType.Type.VARIANT: "SQL_VARIANT", 1053 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 1054 } 1055 
        # NCHAR/NVARCHAR are removed so the base generator renders them verbatim
        # instead of remapping them.
        TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
        TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

        # Expression -> SQL rendering overrides for T-SQL spellings
        # (e.g. CEILING, IIF, REPLICATE, HASHBYTES-based digests).
        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.AnyValue: any_value_to_max_sql,
            exp.Atan2: rename_func("ATN2"),
            exp.ArrayToString: rename_func("STRING_AGG"),
            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
            exp.Ceil: rename_func("CEILING"),
            exp.Chr: rename_func("CHAR"),
            exp.DateAdd: date_delta_sql("DATEADD"),
            exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
            exp.CurrentDate: rename_func("GETDATE"),
            exp.CurrentTimestamp: rename_func("GETDATE"),
            exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
            exp.GroupConcat: _string_agg_sql,
            exp.If: rename_func("IIF"),
            exp.JSONExtract: _json_extract_sql,
            exp.JSONExtractScalar: _json_extract_sql,
            exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
            exp.Ln: rename_func("LOG"),
            exp.Max: max_or_greatest,
            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
            exp.Min: min_or_least,
            exp.NumberToStr: _format_sql,
            exp.Repeat: rename_func("REPLICATE"),
            exp.CurrentSchema: rename_func("SCHEMA_NAME"),
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_distinct_on,
                    transforms.eliminate_semi_and_anti_joins,
                    transforms.eliminate_qualify,
                    transforms.unnest_generate_date_array_using_recursive_cte,
                ]
            ),
            exp.Stddev: rename_func("STDEV"),
            exp.StrPosition: lambda self, e: strposition_sql(
                self, e, func_name="CHARINDEX", supports_position=True
            ),
            exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
            exp.SHA1Digest: lambda self, e: self.func(
                "HASHBYTES", exp.Literal.string("SHA1"), e.this
            ),
            exp.SHA2: lambda self, e: self.func(
                "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
            ),
            exp.TemporaryProperty: lambda self, e: "",
            exp.TimeStrToTime: _timestrtotime_sql,
            exp.TimeToStr: _format_sql,
            exp.Trim: trim_sql,
            exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
            exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
            exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
            exp.Uuid: lambda *_: "NEWID()",
            exp.DateFromParts: rename_func("DATEFROMPARTS"),
        }

        # RETURNS is generated by returnsproperty_sql below instead of the
        # generic transform.
        TRANSFORMS.pop(exp.ReturnsProperty)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def scope_resolution(self, rhs: str, scope_name: str) -> str:
            """Render scope resolution as T-SQL's `scope::member` syntax."""
            return f"{scope_name}::{rhs}"

        def select_sql(self, expression: exp.Select) -> str:
            """Generate SELECT, rewriting LIMIT/FETCH into T-SQL's OFFSET ... FETCH form.

            T-SQL requires ORDER BY for OFFSET, and TOP can't be combined with
            OFFSET, so the AST is amended in place before delegating to the base
            generator.
            """
            limit = expression.args.get("limit")
            offset = expression.args.get("offset")

            if isinstance(limit, exp.Fetch) and not offset:
                # Dialects like Oracle can FETCH directly from a row set but
                # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
                offset = exp.Offset(expression=exp.Literal.number(0))
                expression.set("offset", offset)

            if offset:
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

                if isinstance(limit, exp.Limit):
                    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                    # we replace here because otherwise TOP would be generated in select_sql
                    limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            """Generate CONVERT/TRY_CONVERT(type, expr[, style])."""
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            """Render a single OPTION(...) hint, inserting '=' only where T-SQL requires it."""
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            """Map LATERAL joins to CROSS APPLY / OUTER APPLY where possible."""
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"

        def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
            """Transpile SPLIT_PART to PARSENAME.

            PARSENAME numbers parts right-to-left and supports at most 4 parts,
            hence the `split_count + 1 - part_index` inversion and the literal
            '.'-delimiter requirement.
            """
            this = expression.this
            split_count = len(this.name.split("."))
            delimiter = expression.args.get("delimiter")
            part_index = expression.args.get("part_index")

            if (
                not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
                or (delimiter and delimiter.name != ".")
                or not part_index
                or split_count > 4
            ):
                self.unsupported(
                    "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
                )
                return ""

            return self.func(
                "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
            )

        def extract_sql(self, expression: exp.Extract) -> str:
            """Generate DATEPART, undoing canonical part names back to T-SQL ones."""
            part = expression.this
            name = DATE_PART_UNMAPPING.get(part.name.upper()) or part

            return self.func("DATEPART", name, expression.expression)

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            """Generate TIMEFROMPARTS; nanoseconds are dropped (unsupported),
            and missing fractions/precision default to 0."""
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)

        def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
            """Generate DATETIMEFROMPARTS; time zone and nanoseconds are dropped
            (unsupported), and a missing milliseconds argument defaults to 0."""
            zone = expression.args.get("zone")
            if zone is not None:
                zone.pop()
                self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

            if expression.args.get("milli") is None:
                expression.set("milli", exp.Literal.number(0))

            return rename_func("DATETIMEFROMPARTS")(self, expression)

        def setitem_sql(self, expression: exp.SetItem) -> str:
            this = expression.this
            if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
                # T-SQL does not use '=' in SET command, except when the LHS is a variable.
                return f"{self.sql(this.left)} {self.sql(this.right)}"

            return super().setitem_sql(expression)

        def boolean_sql(self, expression: exp.Boolean) -> str:
            """Render booleans as BIT literals (1/0) in value positions, and as
            the predicates (1 = 1)/(1 = 0) elsewhere, since T-SQL has no boolean type."""
            if type(expression.parent) in BIT_TYPES or isinstance(
                expression.find_ancestor(exp.Values, exp.Select), exp.Values
            ):
                return "1" if expression.this else "0"

            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            """`x IS TRUE/FALSE` has no T-SQL equivalent; compare with '=' instead."""
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            """Prefix temporary table names with '#' (inside the '[' when quoted)."""
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            """Generate CREATE with several T-SQL-specific rewrites:

            - CTAS becomes SELECT * INTO target FROM (query) AS temp
            - IF NOT EXISTS is emulated via an INFORMATION_SCHEMA/sys.indexes
              existence check wrapping an EXEC of the statement literal
            - CREATE OR REPLACE becomes CREATE OR ALTER
            """
            kind = expression.kind
            exists = expression.args.get("exists")
            expression.set("exists", None)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            if kind == "VIEW":
                expression.this.set("catalog", None)
                with_ = expression.args.get("with_")
                if ctas_expression and with_:
                    # We've already preprocessed the Create expression to bubble up any nested CTEs,
                    # but CREATE VIEW actually requires the WITH clause to come after it so we need
                    # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                    ctas_expression.set("with_", with_.pop())

            table = expression.find(exp.Table)

            # Convert CTAS statement to SELECT .. INTO ..
            if kind == "TABLE" and ctas_expression:
                if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                    ctas_expression = ctas_expression.subquery()

                properties = expression.args.get("properties") or exp.Properties()
                is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

                select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
                select_into.set("into", exp.Into(this=table, temporary=is_temp))

                if like_property:
                    # CREATE ... LIKE only needs the shape, not the rows.
                    select_into.limit(0, copy=False)

                sql = self.sql(select_into)
            else:
                sql = super().create_sql(expression)

            if exists:
                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
                sql_with_ctes = self.prepend_ctes(expression, sql)
                sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
                if kind == "SCHEMA":
                    return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
                elif kind == "TABLE":
                    assert table
                    where = exp.and_(
                        exp.column("TABLE_NAME").eq(table.name),
                        exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                        exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
                    )
                    return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
                elif kind == "INDEX":
                    index = self.sql(exp.Literal.string(expression.this.text("this")))
                    return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
            elif expression.args.get("replace"):
                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

            return self.prepend_ctes(expression, sql)

        @generator.unsupported_args("unlogged", "expressions")
        def into_sql(self, expression: exp.Into) -> str:
            if expression.args.get("temporary"):
                # If the Into expression has a temporary property, push this down to the Identifier
                table = expression.find(exp.Table)
                if table and isinstance(table.this, exp.Identifier):
                    table.this.set("temporary", True)

            return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

        def count_sql(self, expression: exp.Count) -> str:
            """Generate COUNT or COUNT_BIG depending on the parsed variant."""
            func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
            return rename_func(func_name)(self, expression)

        def datediff_sql(self, expression: exp.DateDiff) -> str:
            """Generate DATEDIFF or DATEDIFF_BIG depending on the parsed variant."""
            func_name = "DATEDIFF_BIG" if expression.args.get("big_int") else "DATEDIFF"
            return date_delta_sql(func_name)(self, expression)

        def offset_sql(self, expression: exp.Offset) -> str:
            """T-SQL requires the ROWS keyword after OFFSET n."""
            return f"{super().offset_sql(expression)} ROWS"

        def version_sql(self, expression: exp.Version) -> str:
            """Render temporal-table queries as FOR SYSTEM_TIME <kind> [range]."""
            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
            this = f"FOR {name}"
            expr = expression.expression
            kind = expression.text("kind")
            if kind in ("FROM", "BETWEEN"):
                args = expr.expressions
                # FROM a TO b vs BETWEEN a AND b
                sep = "TO" if kind == "FROM" else "AND"
                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
            else:
                expr_sql = self.sql(expr)

            expr_sql = f" {expr_sql}" if expr_sql else ""
            return f"{this} {kind}{expr_sql}"

        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
            """Render RETURNS [table-variable] <type> for function definitions."""
            table = expression.args.get("table")
            table = f"{table} " if table else ""
            return f"RETURNS {table}{self.sql(expression, 'this')}"

        def returning_sql(self, expression: exp.Returning) -> str:
            """RETURNING maps to T-SQL's OUTPUT [... INTO target] clause."""
            into = self.sql(expression, "into")
            into = self.seg(f"INTO {into}") if into else ""
            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

        def transaction_sql(self, expression: exp.Transaction) -> str:
            """Generate BEGIN TRANSACTION [name] [WITH MARK 'description']."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            mark = self.sql(expression, "mark")
            mark = f" WITH MARK {mark}" if mark else ""
            return f"BEGIN TRANSACTION{this}{mark}"

        def commit_sql(self, expression: exp.Commit) -> str:
            """Generate COMMIT TRANSACTION [name] [WITH (DELAYED_DURABILITY = ON|OFF)]."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            durability = expression.args.get("durability")
            durability = (
                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
                if durability is not None
                else ""
            )
            return f"COMMIT TRANSACTION{this}{durability}"

        def rollback_sql(self, expression: exp.Rollback) -> str:
            """Generate ROLLBACK TRANSACTION [name]."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            return f"ROLLBACK TRANSACTION{this}"

        def identifier_sql(self, expression: exp.Identifier) -> str:
            """Re-apply the temp-table prefixes: ## for global, # for local."""
            identifier = super().identifier_sql(expression)

            if expression.args.get("global_"):
                identifier = f"##{identifier}"
            elif expression.args.get("temporary"):
                identifier = f"#{identifier}"

            return identifier

        def constraint_sql(self, expression: exp.Constraint) -> str:
            """Render constraint parts space-separated rather than comma-separated."""
            this = self.sql(expression, "this")
            expressions = self.expressions(expression, flat=True, sep=" ")
            return f"CONSTRAINT {this} {expressions}"

        def length_sql(self, expression: exp.Length) -> str:
            return self._uncast_text(expression, "LEN")

        def right_sql(self, expression: exp.Right) -> str:
            return self._uncast_text(expression, "RIGHT")

        def left_sql(self, expression: exp.Left) -> str:
            return self._uncast_text(expression, "LEFT")

        def _uncast_text(self, expression: exp.Expression, name: str) -> str:
            """Drop a CAST(... AS TEXT) wrapper that the parser added via
            _build_with_arg_as_text, so LEN/LEFT/RIGHT round-trip cleanly."""
            this = expression.this
            if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
                this_sql = self.sql(this, "this")
            else:
                this_sql = self.sql(this)
            expression_sql = self.sql(expression, "expression")
            return self.func(name, this_sql, expression_sql if expression_sql else None)

        def partition_sql(self, expression: exp.Partition) -> str:
            """Render partition specs as the WITH (PARTITIONS(...)) table option."""
            return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

        def alter_sql(self, expression: exp.Alter) -> str:
            """ALTER ... RENAME has no T-SQL form; emit EXEC sp_rename instead."""
            action = seq_get(expression.args.get("actions") or [], 0)
            if isinstance(action, exp.AlterRename):
                return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
            return super().alter_sql(expression)

        def drop_sql(self, expression: exp.Drop) -> str:
            """DROP VIEW may not be catalog-qualified in T-SQL; strip the catalog."""
            if expression.args["kind"] == "VIEW":
                expression.this.set("catalog", None)
            return super().drop_sql(expression)

        def options_modifier(self, expression: exp.Expression) -> str:
            """Append the query-level OPTION(...) hint list, if any."""
            options = self.expressions(expression, key="options")
            return f" OPTION{self.wrap(options)}" if options else ""

        def dpipe_sql(self, expression: exp.DPipe) -> str:
            """T-SQL concatenates strings with '+'; fold a || chain into nested Adds."""
            return self.sql(
                reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
            )

        def isascii_sql(self, expression: exp.IsAscii) -> str:
            # The hex literal decodes to the pattern %[^\x00-\x7F]% (it embeds a NUL,
            # hence the CONVERT with a binary collation); PATINDEX = 0 means no
            # non-ASCII character was found.
            return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

        def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
            """Append parameter default (`= value`) and OUT/OUTPUT/READONLY mode
            to the base column definition."""
            this = super().columndef_sql(expression, sep)
            default = self.sql(expression, "default")
            default = f" = {default}" if default else ""
            output = self.sql(expression, "output")
            output = f" {output}" if output else ""
            return f"{this}{default}{output}"

        def coalesce_sql(self, expression: exp.Coalesce) -> str:
            """Regenerate ISNULL when the expression was parsed from ISNULL,
            otherwise COALESCE."""
            func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
            return rename_func(func_name)(self, expression)
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.

    Mutates `expression` in place (via qualify_outputs) and returns it. Only
    applies when the node is a CTE/Subquery with a TableAlias that declares no
    explicit column list.
    """
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        # Imported lazily; qualify_columns pulls in the optimizer machinery.
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        # NOTE: this is a generator, deliberately consumed only after
        # qualify_outputs has replaced the projections below.
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression
# Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
class TSQL(Dialect):
    """Dialect for Microsoft SQL Server / Azure SQL (T-SQL)."""

    SUPPORTS_SEMI_ANTI_JOIN = False
    LOG_BASE_FIRST = False
    TYPED_DIVISION = True
    CONCAT_COALESCE = True
    # Unquoted identifiers are case-insensitive in T-SQL's default collations.
    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE
    ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False

    TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"

    EXPRESSION_METADATA = EXPRESSION_METADATA.copy()

    # T-SQL date-part abbreviations -> canonical part names.
    DATE_PART_MAPPING = {
        **Dialect.DATE_PART_MAPPING,
        "QQ": "QUARTER",
        "M": "MONTH",
        "Y": "DAYOFYEAR",
        "WW": "WEEK",
        "N": "MINUTE",
        "SS": "SECOND",
        "MCS": "MICROSECOND",
        "TZOFFSET": "TIMEZONE_MINUTE",
        "TZ": "TIMEZONE_MINUTE",
        "ISO_WEEK": "WEEKISO",
        "ISOWK": "WEEKISO",
        "ISOWW": "WEEKISO",
    }

    # T-SQL format tokens -> strftime-style tokens. Both DATEPART-style names
    # (lowercase) and FORMAT-style tokens (case-sensitive) live in this table.
    # NOTE(review): "mm"/"m" map to minute tokens (%M/%-M) while "month" maps
    # to %m — verify this asymmetry is intentional before relying on it.
    TIME_MAPPING = {
        "year": "%Y",
        "dayofyear": "%j",
        "day": "%d",
        "dy": "%d",
        "y": "%Y",
        "week": "%W",
        "ww": "%W",
        "wk": "%W",
        "isowk": "%V",
        "isoww": "%V",
        "iso_week": "%V",
        "hour": "%h",
        "hh": "%I",
        "minute": "%M",
        "mi": "%M",
        "n": "%M",
        "second": "%S",
        "ss": "%S",
        "s": "%-S",
        "millisecond": "%f",
        "ms": "%f",
        "weekday": "%w",
        "dw": "%w",
        "month": "%m",
        "mm": "%M",
        "m": "%-M",
        "Y": "%Y",
        "YYYY": "%Y",
        "YY": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dddd": "%A",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "h": "%-I",
        "ffffff": "%f",
        "yyyy": "%Y",
        "yy": "%y",
    }

    # CONVERT style numbers -> strftime formats (styles 0-25 are two-digit
    # years, 100+ are four-digit years).
    CONVERT_FORMAT_MAPPING = {
        "0": "%b %d %Y %-I:%M%p",
        "1": "%m/%d/%y",
        "2": "%y.%m.%d",
        "3": "%d/%m/%y",
        "4": "%d.%m.%y",
        "5": "%d-%m-%y",
        "6": "%d %b %y",
        "7": "%b %d, %y",
        "8": "%H:%M:%S",
        "9": "%b %d %Y %-I:%M:%S:%f%p",
        "10": "mm-dd-yy",
        "11": "yy/mm/dd",
        "12": "yymmdd",
        "13": "%d %b %Y %H:%M:ss:%f",
        "14": "%H:%M:%S:%f",
        "20": "%Y-%m-%d %H:%M:%S",
        "21": "%Y-%m-%d %H:%M:%S.%f",
        "22": "%m/%d/%y %-I:%M:%S %p",
        "23": "%Y-%m-%d",
        "24": "%H:%M:%S",
        "25": "%Y-%m-%d %H:%M:%S.%f",
        "100": "%b %d %Y %-I:%M%p",
        "101": "%m/%d/%Y",
        "102": "%Y.%m.%d",
        "103": "%d/%m/%Y",
        "104": "%d.%m.%Y",
        "105": "%d-%m-%Y",
        "106": "%d %b %Y",
        "107": "%b %d, %Y",
        "108": "%H:%M:%S",
        "109": "%b %d %Y %-I:%M:%S:%f%p",
        "110": "%m-%d-%Y",
        "111": "%Y/%m/%d",
        "112": "%Y%m%d",
        "113": "%d %b %Y %H:%M:%S:%f",
        "114": "%H:%M:%S:%f",
        "120": "%Y-%m-%d %H:%M:%S",
        "121": "%Y-%m-%d %H:%M:%S.%f",
        "126": "%Y-%m-%dT%H:%M:%S.%f",
    }

    # FORMAT() standard date/time format specifiers -> strftime formats.
    FORMAT_TIME_MAPPING = {
        "y": "%B %Y",
        "d": "%m/%d/%Y",
        "H": "%-H",
        "h": "%-I",
        "s": "%Y-%m-%d %H:%M:%S",
        "D": "%A,%B,%Y",
        "f": "%A,%B,%Y %-I:%M %p",
        "F": "%A,%B,%Y %-I:%M:%S %p",
        "g": "%m/%d/%Y %-I:%M %p",
        "G": "%m/%d/%Y %-I:%M:%S %p",
        "M": "%B %-d",
        "m": "%B %-d",
        "O": "%Y-%m-%dT%H:%M:%S",
        "u": "%Y-%M-%D %H:%M:%S%z",
        "U": "%A, %B %D, %Y %H:%M:%S%z",
        "T": "%-I:%M:%S %p",
        "t": "%-I:%M",
        "Y": "%a %Y",
    }

    class Tokenizer(tokens.Tokenizer):
        IDENTIFIERS = [("[", "]"), '"']
        QUOTES = ["'", '"']
        # 0x... binary literals have no closing delimiter.
        HEX_STRINGS = [("0x", ""), ("0X", "")]
        # @variables, $identifiers and #temp names are single-token vars.
        VAR_SINGLE_TOKENS = {"@", "$", "#"}

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "CLUSTERED INDEX": TokenType.INDEX,
            "DATETIME2": TokenType.DATETIME2,
            "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
            "DECLARE": TokenType.DECLARE,
            "EXEC": TokenType.COMMAND,
            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
            "GO": TokenType.COMMAND,
            "IMAGE": TokenType.IMAGE,
            "MONEY": TokenType.MONEY,
            "NONCLUSTERED INDEX": TokenType.INDEX,
            "NTEXT": TokenType.TEXT,
            "OPTION": TokenType.OPTION,
            "OUTPUT": TokenType.RETURNING,
            "PRINT": TokenType.COMMAND,
            "PROC": TokenType.PROCEDURE,
            "REAL": TokenType.FLOAT,
            "ROWVERSION": TokenType.ROWVERSION,
            "SMALLDATETIME": TokenType.SMALLDATETIME,
            "SMALLMONEY": TokenType.SMALLMONEY,
            "SQL_VARIANT": TokenType.VARIANT,
            "SYSTEM_USER": TokenType.CURRENT_USER,
            "TOP": TokenType.TOP,
            "TIMESTAMP": TokenType.ROWVERSION,
            "TINYINT": TokenType.UTINYINT,
            "UNIQUEIDENTIFIER": TokenType.UUID,
            "UPDATE STATISTICS": TokenType.COMMAND,
            "XML": TokenType.XML,
        }
        # T-SQL has no /*+ ... */ optimizer-hint comment syntax.
        KEYWORDS.pop("/*+")

        COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}

    class Parser(parser.Parser):
        SET_REQUIRES_ASSIGNMENT_DELIMITER = False
        LOG_DEFAULTS_TO_LN = True
        STRING_ALIASES = True
        NO_PAREN_IF_COMMANDS = False

        QUERY_MODIFIER_PARSERS = {
            **parser.Parser.QUERY_MODIFIER_PARSERS,
            TokenType.OPTION: lambda self: ("options", self._parse_options()),
            TokenType.FOR: lambda self: ("for_", self._parse_for()),
        }

        # T-SQL does not allow BEGIN to be used as an identifier
        ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN}
        ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN}
        TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
        COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
        UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN}

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            "ATN2": exp.Atan2.from_arg_list,
            # CHARINDEX(substr, string[, start]) -> StrPosition(string, substr, start)
            "CHARINDEX": lambda args: exp.StrPosition(
                this=seq_get(args, 1),
                substr=seq_get(args, 0),
                position=seq_get(args, 2),
            ),
            "COUNT": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=False
            ),
            "COUNT_BIG": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=True
            ),
            "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
            "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
            "DATEDIFF_BIG": _build_date_delta(
                exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL, big_int=True
            ),
            "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
            "DATETIMEFROMPARTS": _build_datetimefromparts,
            "EOMONTH": _build_eomonth,
            "FORMAT": _build_format,
            "GETDATE": exp.CurrentTimestamp.from_arg_list,
            "HASHBYTES": _build_hashbytes,
            "ISNULL": lambda args: build_coalesce(args=args, is_null=True),
            "JSON_QUERY": _build_json_query,
            "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
            "LEN": _build_with_arg_as_text(exp.Length),
            "LEFT": _build_with_arg_as_text(exp.Left),
            "NEWID": exp.Uuid.from_arg_list,
            "RIGHT": _build_with_arg_as_text(exp.Right),
            "PARSENAME": _build_parsename,
            "REPLICATE": exp.Repeat.from_arg_list,
            "SCHEMA_NAME": exp.CurrentSchema.from_arg_list,
            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
            "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
            "SUSER_NAME": exp.CurrentUser.from_arg_list,
            "SUSER_SNAME": exp.CurrentUser.from_arg_list,
            "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list,
            "SYSTEM_USER": exp.CurrentUser.from_arg_list,
            "TIMEFROMPARTS": _build_timefromparts,
            "DATETRUNC": _build_datetrunc,
        }

        JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

        PROCEDURE_OPTIONS = dict.fromkeys(
            ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple()
        )

        # Parameter modes allowed after a procedure/function parameter definition.
        COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"}

        RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
            TokenType.TABLE,
            *parser.Parser.TYPE_TOKENS,
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,
            TokenType.DECLARE: lambda self: self._parse_declare(),
        }

        RANGE_PARSERS = {
            **parser.Parser.RANGE_PARSERS,
            TokenType.DCOLON: lambda self, this: self.expression(
                exp.ScopeResolution,
                this=this,
                expression=self._parse_function() or self._parse_var(any_token=True),
            ),
        }

        NO_PAREN_FUNCTION_PARSERS = {
            **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
            "NEXT": lambda self: self._parse_next_value_for(),
        }

        FUNCTION_PARSERS: t.Dict[str, t.Callable] = {
            **parser.Parser.FUNCTION_PARSERS,
            "JSON_ARRAYAGG": lambda self: self.expression(
                exp.JSONArrayAgg,
                this=self._parse_bitwise(),
                order=self._parse_order(),
                null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"),
            ),
            "DATEPART": lambda self: self._parse_datepart(),
        }

        # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
        COLUMN_OPERATORS = {
            **parser.Parser.COLUMN_OPERATORS,
            TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
            if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
            else self.expression(exp.ScopeResolution, this=this, expression=to),
        }

        SET_OP_MODIFIERS = {"offset"}

        # ODBC escape literals: {d '...'}, {t '...'}, {ts '...'}
        ODBC_DATETIME_LITERALS = {
            "d": exp.Date,
            "t": exp.Time,
            "ts": exp.Timestamp,
        }

        def _parse_datepart(self) -> exp.Extract:
            """Parse DATEPART(part, expr) into Extract with a canonical part name."""
            this = self._parse_var()
            expression = self._match(TokenType.COMMA) and self._parse_bitwise()
            name = map_date_part(this, self.dialect)

            return self.expression(exp.Extract, this=name, expression=expression)

        def _parse_alter_table_set(self) -> exp.AlterSet:
            # ALTER TABLE ... SET is parenthesized in T-SQL.
            return self._parse_wrapped(super()._parse_alter_table_set)

        def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]:
            """Allow a parenthesized MERGE where a wrapped SELECT is expected."""
            if self._match(TokenType.MERGE):
                comments = self._prev_comments
                merge = self._parse_merge()
                merge.add_comments(comments, prepend=True)
                return merge

            return super()._parse_wrapped_select(table=table)

        def _parse_dcolon(self) -> t.Optional[exp.Expression]:
            # We want to use _parse_types() if the first token after :: is a known type,
            # otherwise we could parse something like x::varchar(max) into a function
            if self._match_set(self.TYPE_TOKENS, advance=False):
                return self._parse_types()

            return self._parse_function() or self._parse_types()

        def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
            """Parse the query-level OPTION(<hint> [= value], ...) clause."""
            if not self._match(TokenType.OPTION):
                return None

            def _parse_option() -> t.Optional[exp.Expression]:
                option = self._parse_var_from_options(OPTIONS)
                if not option:
                    return None

                # The '=' between option and value is optional at parse time.
                self._match(TokenType.EQ)
                return self.expression(
                    exp.QueryOption, this=option, expression=self._parse_primary_or_var()
                )

            return self._parse_wrapped_csv(_parse_option)

        def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption:
            """Parse a FOR XML option of the form NAME or NAME('value')."""
            this = self._parse_primary_or_var()
            if self._match(TokenType.L_PAREN, advance=False):
                expression = self._parse_wrapped(self._parse_string)
            else:
                expression = None

            return exp.XMLKeyValueOption(this=this, expression=expression)

        def _parse_for(self) -> t.Optional[t.List[exp.Expression]]:
            """Parse the FOR XML <option>, ... query modifier."""
            if not self._match_pair(TokenType.FOR, TokenType.XML):
                return None

            def _parse_for_xml() -> t.Optional[exp.Expression]:
                return self.expression(
                    exp.QueryOption,
                    this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False)
                    or self._parse_xml_key_value_option(),
                )

            return self._parse_csv(_parse_for_xml)

        def _parse_projections(self) -> t.List[exp.Expression]:
            """
            T-SQL supports the syntax alias = expression in the SELECT's projection list,
            so we transform all parsed Selects to convert their EQ projections into Aliases.

            See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
            """
            return [
                (
                    exp.alias_(projection.expression, projection.this.this, copy=False)
                    if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                    else projection
                )
                for projection in super()._parse_projections()
            ]

        def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
            """Applies to SQL Server and Azure SQL Database
            COMMIT [ { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable ] ]
                [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

            ROLLBACK { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable
                | savepoint_name | @savepoint_variable ]
            """
            rollback = self._prev.token_type == TokenType.ROLLBACK

            self._match_texts(("TRAN", "TRANSACTION"))
            this = self._parse_id_var()

            if rollback:
                return self.expression(exp.Rollback, this=this)

            durability = None
            if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
                self._match_text_seq("DELAYED_DURABILITY")
                self._match(TokenType.EQ)

                if self._match_text_seq("OFF"):
                    durability = False
                else:
                    self._match(TokenType.ON)
                    durability = True

                self._match_r_paren()

            return self.expression(exp.Commit, this=this, durability=durability)

        def _parse_transaction(self) -> exp.Transaction | exp.Command:
            """Applies to SQL Server and Azure SQL Database
            BEGIN { TRAN | TRANSACTION }
              [ { transaction_name | @tran_name_variable }
              [ WITH MARK [ 'description' ] ]
              ]
            """
            if self._match_texts(("TRAN", "TRANSACTION")):
                transaction = self.expression(exp.Transaction, this=self._parse_id_var())
                if self._match_text_seq("WITH", "MARK"):
                    transaction.set("mark", self._parse_string())

                return transaction

            return self._parse_as_command(self._prev)

        def _parse_returns(self) -> exp.ReturnsProperty:
            """Parse RETURNS [@table_variable] <type>, attaching the table name."""
            table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
            returns = super()._parse_returns()
            returns.set("table", table)
            return returns

        def _parse_convert(
            self, strict: bool, safe: t.Optional[bool] = None
        ) -> t.Optional[exp.Expression]:
            """Parse CONVERT/TRY_CONVERT(type, expr[, style]).

            `strict` is accepted for interface parity with the base parser but
            is not used here.
            """
            this = self._parse_types()
            self._match(TokenType.COMMA)
            args = [this, *self._parse_csv(self._parse_assignment)]
            convert = exp.Convert.from_arg_list(args)
            convert.set("safe", safe)
            return convert

        def _parse_column_def(
            self, this: t.Optional[exp.Expression], computed_column: bool = True
        ) -> t.Optional[exp.Expression]:
            """Extend column/parameter defs with `= default` and OUT/OUTPUT/READONLY."""
            this = super()._parse_column_def(this=this, computed_column=computed_column)
            if not this:
                return None
            if self._match(TokenType.EQ):
                this.set("default", self._parse_disjunction())
            if self._match_texts(self.COLUMN_DEFINITION_MODES):
                this.set("output", self._prev.text)
            return this

        def _parse_user_defined_function(
            self, kind: t.Optional[TokenType] = None
        ) -> t.Optional[exp.Expression]:
            """Parse procedure parameter lists that appear without parentheses."""
            this = super()._parse_user_defined_function(kind=kind)

            if (
                kind == TokenType.FUNCTION
                or isinstance(this, exp.UserDefinedFunction)
                or self._match(TokenType.ALIAS, advance=False)
            ):
                return this

            if not self._match(TokenType.WITH, advance=False):
                expressions = self._parse_csv(self._parse_function_parameter)
            else:
                expressions = None

            return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

        def _parse_into(self) -> t.Optional[exp.Into]:
            into = super()._parse_into()

            table = isinstance(into, exp.Into) and into.find(exp.Table)
            if isinstance(table, exp.Table):
                table_identifier = table.this
                if table_identifier.args.get("temporary"):
                    # Promote the temporary property from the Identifier to the Into expression
                    t.cast(exp.Into, into).set("temporary", True)

            return into

        def _parse_id_var(
            self,
            any_token: bool = True,
            tokens: t.Optional[t.Collection[TokenType]] = None,
        ) -> t.Optional[exp.Expression]:
            """Parse an identifier, tagging #temp (temporary) and ##temp (global)."""
            is_temporary = self._match(TokenType.HASH)
            is_global = is_temporary and self._match(TokenType.HASH)

            this = super()._parse_id_var(any_token=any_token, tokens=tokens)
            if this:
                if is_global:
                    this.set("global_", True)
                elif is_temporary:
                    this.set("temporary", True)

            return this

        def _parse_create(self) -> exp.Create | exp.Command:
            """Promote a #temp table name into a TemporaryProperty on the Create."""
            create = super()._parse_create()

            if isinstance(create, exp.Create):
                table = create.this.this if isinstance(create.this, exp.Schema) else create.this
                if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
                    if not create.args.get("properties"):
                        create.set("properties", exp.Properties(expressions=[]))

                    create.args["properties"].append("expressions", exp.TemporaryProperty())

            return create

        def _parse_if(self) -> t.Optional[exp.Expression]:
            """Recognize IF OBJECT_ID('...') IS NOT NULL DROP ... as DROP IF EXISTS."""
            index = self._index

            if self._match_text_seq("OBJECT_ID"):
                self._parse_wrapped_csv(self._parse_string)
                if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                    return self._parse_drop(exists=True)
            self._retreat(index)

            return super()._parse_if()

        def _parse_unique(self) -> exp.UniqueColumnConstraint:
            """Parse UNIQUE [CLUSTERED | NONCLUSTERED] [(columns)]."""
            if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
                this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
            else:
                this = self._parse_schema(self._parse_id_var(any_token=False))

            return self.expression(exp.UniqueColumnConstraint, this=this)

        def _parse_update(self) -> exp.Update:
            """Parse UPDATE, including a trailing OPTION(...) hint clause."""
            expression = super()._parse_update()
            expression.set("options", self._parse_options())
            return expression

        def _parse_partition(self) -> t.Optional[exp.Partition]:
            if not self._match_text_seq("WITH", "(", "PARTITIONS"):
                return
None 953 954 def parse_range(): 955 low = self._parse_bitwise() 956 high = self._parse_bitwise() if self._match_text_seq("TO") else None 957 958 return ( 959 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 960 ) 961 962 partition = self.expression( 963 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 964 ) 965 966 self._match_r_paren() 967 968 return partition 969 970 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 971 var = self._parse_id_var() 972 if not var: 973 return None 974 975 self._match(TokenType.ALIAS) 976 return self.expression( 977 exp.DeclareItem, 978 this=var, 979 kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(), 980 default=self._match(TokenType.EQ) and self._parse_bitwise(), 981 ) 982 983 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 984 expression = super()._parse_alter_table_alter() 985 986 if expression is not None: 987 collation = expression.args.get("collate") 988 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 989 identifier = collation.this 990 collation.set("this", exp.Var(this=identifier.name)) 991 992 return expression 993 994 def _parse_primary_key_part(self) -> t.Optional[exp.Expression]: 995 return self._parse_ordered() 996 997 class Generator(generator.Generator): 998 LIMIT_IS_TOP = True 999 QUERY_HINTS = False 1000 RETURNING_END = False 1001 NVL2_SUPPORTED = False 1002 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 1003 LIMIT_FETCH = "FETCH" 1004 COMPUTED_COLUMN_WITH_TYPE = False 1005 CTE_RECURSIVE_KEYWORD_REQUIRED = False 1006 ENSURE_BOOLS = True 1007 NULL_ORDERING_SUPPORTED = None 1008 SUPPORTS_SINGLE_ARG_CONCAT = False 1009 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 1010 SUPPORTS_SELECT_INTO = True 1011 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 1012 SUPPORTS_TO_NUMBER = False 1013 SET_OP_MODIFIERS = False 1014 COPY_PARAMS_EQ_REQUIRED = True 1015 PARSE_JSON_NAME = None 1016 
EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 1017 ALTER_SET_WRAPPED = True 1018 ALTER_SET_TYPE = "" 1019 1020 EXPRESSIONS_WITHOUT_NESTED_CTES = { 1021 exp.Create, 1022 exp.Delete, 1023 exp.Insert, 1024 exp.Intersect, 1025 exp.Except, 1026 exp.Merge, 1027 exp.Select, 1028 exp.Subquery, 1029 exp.Union, 1030 exp.Update, 1031 } 1032 1033 SUPPORTED_JSON_PATH_PARTS = { 1034 exp.JSONPathKey, 1035 exp.JSONPathRoot, 1036 exp.JSONPathSubscript, 1037 } 1038 1039 TYPE_MAPPING = { 1040 **generator.Generator.TYPE_MAPPING, 1041 exp.DataType.Type.BOOLEAN: "BIT", 1042 exp.DataType.Type.DATETIME2: "DATETIME2", 1043 exp.DataType.Type.DECIMAL: "NUMERIC", 1044 exp.DataType.Type.DOUBLE: "FLOAT", 1045 exp.DataType.Type.INT: "INTEGER", 1046 exp.DataType.Type.ROWVERSION: "ROWVERSION", 1047 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 1048 exp.DataType.Type.TIMESTAMP: "DATETIME2", 1049 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 1050 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 1051 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 1052 exp.DataType.Type.UTINYINT: "TINYINT", 1053 exp.DataType.Type.VARIANT: "SQL_VARIANT", 1054 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 1055 } 1056 1057 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 1058 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 1059 1060 TRANSFORMS = { 1061 **generator.Generator.TRANSFORMS, 1062 exp.AnyValue: any_value_to_max_sql, 1063 exp.Atan2: rename_func("ATN2"), 1064 exp.ArrayToString: rename_func("STRING_AGG"), 1065 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 1066 exp.Ceil: rename_func("CEILING"), 1067 exp.Chr: rename_func("CHAR"), 1068 exp.DateAdd: date_delta_sql("DATEADD"), 1069 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 1070 exp.CurrentDate: rename_func("GETDATE"), 1071 exp.CurrentTimestamp: rename_func("GETDATE"), 1072 exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"), 1073 exp.DateStrToDate: datestrtodate_sql, 1074 exp.GeneratedAsIdentityColumnConstraint: 
generatedasidentitycolumnconstraint_sql, 1075 exp.GroupConcat: _string_agg_sql, 1076 exp.If: rename_func("IIF"), 1077 exp.JSONExtract: _json_extract_sql, 1078 exp.JSONExtractScalar: _json_extract_sql, 1079 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1080 exp.Ln: rename_func("LOG"), 1081 exp.Max: max_or_greatest, 1082 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 1083 exp.Min: min_or_least, 1084 exp.NumberToStr: _format_sql, 1085 exp.Repeat: rename_func("REPLICATE"), 1086 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 1087 exp.Select: transforms.preprocess( 1088 [ 1089 transforms.eliminate_distinct_on, 1090 transforms.eliminate_semi_and_anti_joins, 1091 transforms.eliminate_qualify, 1092 transforms.unnest_generate_date_array_using_recursive_cte, 1093 ] 1094 ), 1095 exp.Stddev: rename_func("STDEV"), 1096 exp.StrPosition: lambda self, e: strposition_sql( 1097 self, e, func_name="CHARINDEX", supports_position=True 1098 ), 1099 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 1100 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 1101 exp.SHA1Digest: lambda self, e: self.func( 1102 "HASHBYTES", exp.Literal.string("SHA1"), e.this 1103 ), 1104 exp.SHA2: lambda self, e: self.func( 1105 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 1106 ), 1107 exp.TemporaryProperty: lambda self, e: "", 1108 exp.TimeStrToTime: _timestrtotime_sql, 1109 exp.TimeToStr: _format_sql, 1110 exp.Trim: trim_sql, 1111 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 1112 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 1113 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 1114 exp.Uuid: lambda *_: "NEWID()", 1115 exp.DateFromParts: rename_func("DATEFROMPARTS"), 1116 } 1117 1118 TRANSFORMS.pop(exp.ReturnsProperty) 1119 1120 PROPERTIES_LOCATION = { 1121 **generator.Generator.PROPERTIES_LOCATION, 1122 exp.VolatileProperty: 
exp.Properties.Location.UNSUPPORTED, 1123 } 1124 1125 def scope_resolution(self, rhs: str, scope_name: str) -> str: 1126 return f"{scope_name}::{rhs}" 1127 1128 def select_sql(self, expression: exp.Select) -> str: 1129 limit = expression.args.get("limit") 1130 offset = expression.args.get("offset") 1131 1132 if isinstance(limit, exp.Fetch) and not offset: 1133 # Dialects like Oracle can FETCH directly from a row set but 1134 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1135 offset = exp.Offset(expression=exp.Literal.number(0)) 1136 expression.set("offset", offset) 1137 1138 if offset: 1139 if not expression.args.get("order"): 1140 # ORDER BY is required in order to use OFFSET in a query, so we use 1141 # a noop order by, since we don't really care about the order. 1142 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1143 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1144 1145 if isinstance(limit, exp.Limit): 1146 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1147 # we replace here because otherwise TOP would be generated in select_sql 1148 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1149 1150 return super().select_sql(expression) 1151 1152 def convert_sql(self, expression: exp.Convert) -> str: 1153 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1154 return self.func( 1155 name, expression.this, expression.expression, expression.args.get("style") 1156 ) 1157 1158 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1159 option = self.sql(expression, "this") 1160 value = self.sql(expression, "expression") 1161 if value: 1162 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1163 return f"{option} {optional_equal_sign}{value}" 1164 return option 1165 1166 def lateral_op(self, expression: exp.Lateral) -> str: 1167 cross_apply = expression.args.get("cross_apply") 1168 if cross_apply is True: 1169 return 
"CROSS APPLY" 1170 if cross_apply is False: 1171 return "OUTER APPLY" 1172 1173 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1174 self.unsupported("LATERAL clause is not supported.") 1175 return "LATERAL" 1176 1177 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1178 this = expression.this 1179 split_count = len(this.name.split(".")) 1180 delimiter = expression.args.get("delimiter") 1181 part_index = expression.args.get("part_index") 1182 1183 if ( 1184 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1185 or (delimiter and delimiter.name != ".") 1186 or not part_index 1187 or split_count > 4 1188 ): 1189 self.unsupported( 1190 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1191 ) 1192 return "" 1193 1194 return self.func( 1195 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1196 ) 1197 1198 def extract_sql(self, expression: exp.Extract) -> str: 1199 part = expression.this 1200 name = DATE_PART_UNMAPPING.get(part.name.upper()) or part 1201 1202 return self.func("DATEPART", name, expression.expression) 1203 1204 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1205 nano = expression.args.get("nano") 1206 if nano is not None: 1207 nano.pop() 1208 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1209 1210 if expression.args.get("fractions") is None: 1211 expression.set("fractions", exp.Literal.number(0)) 1212 if expression.args.get("precision") is None: 1213 expression.set("precision", exp.Literal.number(0)) 1214 1215 return rename_func("TIMEFROMPARTS")(self, expression) 1216 1217 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1218 zone = expression.args.get("zone") 1219 if zone is not None: 1220 zone.pop() 1221 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1222 1223 nano = expression.args.get("nano") 1224 if 
nano is not None: 1225 nano.pop() 1226 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1227 1228 if expression.args.get("milli") is None: 1229 expression.set("milli", exp.Literal.number(0)) 1230 1231 return rename_func("DATETIMEFROMPARTS")(self, expression) 1232 1233 def setitem_sql(self, expression: exp.SetItem) -> str: 1234 this = expression.this 1235 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1236 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1237 return f"{self.sql(this.left)} {self.sql(this.right)}" 1238 1239 return super().setitem_sql(expression) 1240 1241 def boolean_sql(self, expression: exp.Boolean) -> str: 1242 if type(expression.parent) in BIT_TYPES or isinstance( 1243 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1244 ): 1245 return "1" if expression.this else "0" 1246 1247 return "(1 = 1)" if expression.this else "(1 = 0)" 1248 1249 def is_sql(self, expression: exp.Is) -> str: 1250 if isinstance(expression.expression, exp.Boolean): 1251 return self.binary(expression, "=") 1252 return self.binary(expression, "IS") 1253 1254 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1255 sql = self.sql(expression, "this") 1256 properties = expression.args.get("properties") 1257 1258 if sql[:1] != "#" and any( 1259 isinstance(prop, exp.TemporaryProperty) 1260 for prop in (properties.expressions if properties else []) 1261 ): 1262 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1263 1264 return sql 1265 1266 def create_sql(self, expression: exp.Create) -> str: 1267 kind = expression.kind 1268 exists = expression.args.get("exists") 1269 expression.set("exists", None) 1270 1271 like_property = expression.find(exp.LikeProperty) 1272 if like_property: 1273 ctas_expression = like_property.this 1274 else: 1275 ctas_expression = expression.expression 1276 1277 if kind == "VIEW": 1278 expression.this.set("catalog", None) 
1279 with_ = expression.args.get("with_") 1280 if ctas_expression and with_: 1281 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1282 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1283 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1284 ctas_expression.set("with_", with_.pop()) 1285 1286 table = expression.find(exp.Table) 1287 1288 # Convert CTAS statement to SELECT .. INTO .. 1289 if kind == "TABLE" and ctas_expression: 1290 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1291 ctas_expression = ctas_expression.subquery() 1292 1293 properties = expression.args.get("properties") or exp.Properties() 1294 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1295 1296 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1297 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1298 1299 if like_property: 1300 select_into.limit(0, copy=False) 1301 1302 sql = self.sql(select_into) 1303 else: 1304 sql = super().create_sql(expression) 1305 1306 if exists: 1307 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1308 sql_with_ctes = self.prepend_ctes(expression, sql) 1309 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1310 if kind == "SCHEMA": 1311 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1312 elif kind == "TABLE": 1313 assert table 1314 where = exp.and_( 1315 exp.column("TABLE_NAME").eq(table.name), 1316 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1317 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1318 ) 1319 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1320 elif kind == "INDEX": 1321 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1322 return f"""IF 
NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1323 elif expression.args.get("replace"): 1324 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1325 1326 return self.prepend_ctes(expression, sql) 1327 1328 @generator.unsupported_args("unlogged", "expressions") 1329 def into_sql(self, expression: exp.Into) -> str: 1330 if expression.args.get("temporary"): 1331 # If the Into expression has a temporary property, push this down to the Identifier 1332 table = expression.find(exp.Table) 1333 if table and isinstance(table.this, exp.Identifier): 1334 table.this.set("temporary", True) 1335 1336 return f"{self.seg('INTO')} {self.sql(expression, 'this')}" 1337 1338 def count_sql(self, expression: exp.Count) -> str: 1339 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1340 return rename_func(func_name)(self, expression) 1341 1342 def datediff_sql(self, expression: exp.DateDiff) -> str: 1343 func_name = "DATEDIFF_BIG" if expression.args.get("big_int") else "DATEDIFF" 1344 return date_delta_sql(func_name)(self, expression) 1345 1346 def offset_sql(self, expression: exp.Offset) -> str: 1347 return f"{super().offset_sql(expression)} ROWS" 1348 1349 def version_sql(self, expression: exp.Version) -> str: 1350 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1351 this = f"FOR {name}" 1352 expr = expression.expression 1353 kind = expression.text("kind") 1354 if kind in ("FROM", "BETWEEN"): 1355 args = expr.expressions 1356 sep = "TO" if kind == "FROM" else "AND" 1357 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1358 else: 1359 expr_sql = self.sql(expr) 1360 1361 expr_sql = f" {expr_sql}" if expr_sql else "" 1362 return f"{this} {kind}{expr_sql}" 1363 1364 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1365 table = expression.args.get("table") 1366 table = f"{table} " if table else "" 1367 
return f"RETURNS {table}{self.sql(expression, 'this')}" 1368 1369 def returning_sql(self, expression: exp.Returning) -> str: 1370 into = self.sql(expression, "into") 1371 into = self.seg(f"INTO {into}") if into else "" 1372 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1373 1374 def transaction_sql(self, expression: exp.Transaction) -> str: 1375 this = self.sql(expression, "this") 1376 this = f" {this}" if this else "" 1377 mark = self.sql(expression, "mark") 1378 mark = f" WITH MARK {mark}" if mark else "" 1379 return f"BEGIN TRANSACTION{this}{mark}" 1380 1381 def commit_sql(self, expression: exp.Commit) -> str: 1382 this = self.sql(expression, "this") 1383 this = f" {this}" if this else "" 1384 durability = expression.args.get("durability") 1385 durability = ( 1386 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1387 if durability is not None 1388 else "" 1389 ) 1390 return f"COMMIT TRANSACTION{this}{durability}" 1391 1392 def rollback_sql(self, expression: exp.Rollback) -> str: 1393 this = self.sql(expression, "this") 1394 this = f" {this}" if this else "" 1395 return f"ROLLBACK TRANSACTION{this}" 1396 1397 def identifier_sql(self, expression: exp.Identifier) -> str: 1398 identifier = super().identifier_sql(expression) 1399 1400 if expression.args.get("global_"): 1401 identifier = f"##{identifier}" 1402 elif expression.args.get("temporary"): 1403 identifier = f"#{identifier}" 1404 1405 return identifier 1406 1407 def constraint_sql(self, expression: exp.Constraint) -> str: 1408 this = self.sql(expression, "this") 1409 expressions = self.expressions(expression, flat=True, sep=" ") 1410 return f"CONSTRAINT {this} {expressions}" 1411 1412 def length_sql(self, expression: exp.Length) -> str: 1413 return self._uncast_text(expression, "LEN") 1414 1415 def right_sql(self, expression: exp.Right) -> str: 1416 return self._uncast_text(expression, "RIGHT") 1417 1418 def left_sql(self, expression: exp.Left) -> str: 1419 
return self._uncast_text(expression, "LEFT") 1420 1421 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1422 this = expression.this 1423 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1424 this_sql = self.sql(this, "this") 1425 else: 1426 this_sql = self.sql(this) 1427 expression_sql = self.sql(expression, "expression") 1428 return self.func(name, this_sql, expression_sql if expression_sql else None) 1429 1430 def partition_sql(self, expression: exp.Partition) -> str: 1431 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1432 1433 def alter_sql(self, expression: exp.Alter) -> str: 1434 action = seq_get(expression.args.get("actions") or [], 0) 1435 if isinstance(action, exp.AlterRename): 1436 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1437 return super().alter_sql(expression) 1438 1439 def drop_sql(self, expression: exp.Drop) -> str: 1440 if expression.args["kind"] == "VIEW": 1441 expression.this.set("catalog", None) 1442 return super().drop_sql(expression) 1443 1444 def options_modifier(self, expression: exp.Expression) -> str: 1445 options = self.expressions(expression, key="options") 1446 return f" OPTION{self.wrap(options)}" if options else "" 1447 1448 def dpipe_sql(self, expression: exp.DPipe) -> str: 1449 return self.sql( 1450 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1451 ) 1452 1453 def isascii_sql(self, expression: exp.IsAscii) -> str: 1454 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1455 1456 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1457 this = super().columndef_sql(expression, sep) 1458 default = self.sql(expression, "default") 1459 default = f" = {default}" if default else "" 1460 output = self.sql(expression, "output") 1461 output = f" {output}" if output else "" 1462 return f"{this}{default}{output}" 1463 
1464 def coalesce_sql(self, expression: exp.Coalesce) -> str: 1465 func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE" 1466 return rename_func(func_name)(self, expression)
Whether the base argument comes first in the LOG function.
Possible values: True, False, and None — None meaning the two-argument form of LOG is not supported.
Whether the behavior of a / b depends on the types of a and b.
False means a / b is always float division.
True means a / b is integer division if both a and b are integers.
A NULL arg in CONCAT yields NULL by default, but in some dialects it yields an empty string.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python strftime formats.
544 class Tokenizer(tokens.Tokenizer): 545 IDENTIFIERS = [("[", "]"), '"'] 546 QUOTES = ["'", '"'] 547 HEX_STRINGS = [("0x", ""), ("0X", "")] 548 VAR_SINGLE_TOKENS = {"@", "$", "#"} 549 550 KEYWORDS = { 551 **tokens.Tokenizer.KEYWORDS, 552 "CLUSTERED INDEX": TokenType.INDEX, 553 "DATETIME2": TokenType.DATETIME2, 554 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 555 "DECLARE": TokenType.DECLARE, 556 "EXEC": TokenType.COMMAND, 557 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 558 "GO": TokenType.COMMAND, 559 "IMAGE": TokenType.IMAGE, 560 "MONEY": TokenType.MONEY, 561 "NONCLUSTERED INDEX": TokenType.INDEX, 562 "NTEXT": TokenType.TEXT, 563 "OPTION": TokenType.OPTION, 564 "OUTPUT": TokenType.RETURNING, 565 "PRINT": TokenType.COMMAND, 566 "PROC": TokenType.PROCEDURE, 567 "REAL": TokenType.FLOAT, 568 "ROWVERSION": TokenType.ROWVERSION, 569 "SMALLDATETIME": TokenType.SMALLDATETIME, 570 "SMALLMONEY": TokenType.SMALLMONEY, 571 "SQL_VARIANT": TokenType.VARIANT, 572 "SYSTEM_USER": TokenType.CURRENT_USER, 573 "TOP": TokenType.TOP, 574 "TIMESTAMP": TokenType.ROWVERSION, 575 "TINYINT": TokenType.UTINYINT, 576 "UNIQUEIDENTIFIER": TokenType.UUID, 577 "UPDATE STATISTICS": TokenType.COMMAND, 578 "XML": TokenType.XML, 579 } 580 KEYWORDS.pop("/*+") 581 582 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- ESCAPE_FOLLOW_CHARS
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- use_rs_tokenizer
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
584 class Parser(parser.Parser): 585 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 586 LOG_DEFAULTS_TO_LN = True 587 STRING_ALIASES = True 588 NO_PAREN_IF_COMMANDS = False 589 590 QUERY_MODIFIER_PARSERS = { 591 **parser.Parser.QUERY_MODIFIER_PARSERS, 592 TokenType.OPTION: lambda self: ("options", self._parse_options()), 593 TokenType.FOR: lambda self: ("for_", self._parse_for()), 594 } 595 596 # T-SQL does not allow BEGIN to be used as an identifier 597 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 598 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 599 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 600 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 601 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 602 603 FUNCTIONS = { 604 **parser.Parser.FUNCTIONS, 605 "ATN2": exp.Atan2.from_arg_list, 606 "CHARINDEX": lambda args: exp.StrPosition( 607 this=seq_get(args, 1), 608 substr=seq_get(args, 0), 609 position=seq_get(args, 2), 610 ), 611 "COUNT": lambda args: exp.Count( 612 this=seq_get(args, 0), expressions=args[1:], big_int=False 613 ), 614 "COUNT_BIG": lambda args: exp.Count( 615 this=seq_get(args, 0), expressions=args[1:], big_int=True 616 ), 617 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 618 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 619 "DATEDIFF_BIG": _build_date_delta( 620 exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL, big_int=True 621 ), 622 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 623 "DATETIMEFROMPARTS": _build_datetimefromparts, 624 "EOMONTH": _build_eomonth, 625 "FORMAT": _build_format, 626 "GETDATE": exp.CurrentTimestamp.from_arg_list, 627 "HASHBYTES": _build_hashbytes, 628 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 629 "JSON_QUERY": _build_json_query, 630 "JSON_VALUE": 
parser.build_extract_json_with_path(exp.JSONExtractScalar), 631 "LEN": _build_with_arg_as_text(exp.Length), 632 "LEFT": _build_with_arg_as_text(exp.Left), 633 "NEWID": exp.Uuid.from_arg_list, 634 "RIGHT": _build_with_arg_as_text(exp.Right), 635 "PARSENAME": _build_parsename, 636 "REPLICATE": exp.Repeat.from_arg_list, 637 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 638 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 639 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 640 "SUSER_NAME": exp.CurrentUser.from_arg_list, 641 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 642 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 643 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 644 "TIMEFROMPARTS": _build_timefromparts, 645 "DATETRUNC": _build_datetrunc, 646 } 647 648 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 649 650 PROCEDURE_OPTIONS = dict.fromkeys( 651 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 652 ) 653 654 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 655 656 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 657 TokenType.TABLE, 658 *parser.Parser.TYPE_TOKENS, 659 } 660 661 STATEMENT_PARSERS = { 662 **parser.Parser.STATEMENT_PARSERS, 663 TokenType.DECLARE: lambda self: self._parse_declare(), 664 } 665 666 RANGE_PARSERS = { 667 **parser.Parser.RANGE_PARSERS, 668 TokenType.DCOLON: lambda self, this: self.expression( 669 exp.ScopeResolution, 670 this=this, 671 expression=self._parse_function() or self._parse_var(any_token=True), 672 ), 673 } 674 675 NO_PAREN_FUNCTION_PARSERS = { 676 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 677 "NEXT": lambda self: self._parse_next_value_for(), 678 } 679 680 FUNCTION_PARSERS: t.Dict[str, t.Callable] = { 681 **parser.Parser.FUNCTION_PARSERS, 682 "JSON_ARRAYAGG": lambda self: self.expression( 683 exp.JSONArrayAgg, 684 this=self._parse_bitwise(), 685 order=self._parse_order(), 686 
null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"), 687 ), 688 "DATEPART": lambda self: self._parse_datepart(), 689 } 690 691 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 692 COLUMN_OPERATORS = { 693 **parser.Parser.COLUMN_OPERATORS, 694 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 695 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 696 else self.expression(exp.ScopeResolution, this=this, expression=to), 697 } 698 699 SET_OP_MODIFIERS = {"offset"} 700 701 ODBC_DATETIME_LITERALS = { 702 "d": exp.Date, 703 "t": exp.Time, 704 "ts": exp.Timestamp, 705 } 706 707 def _parse_datepart(self) -> exp.Extract: 708 this = self._parse_var() 709 expression = self._match(TokenType.COMMA) and self._parse_bitwise() 710 name = map_date_part(this, self.dialect) 711 712 return self.expression(exp.Extract, this=name, expression=expression) 713 714 def _parse_alter_table_set(self) -> exp.AlterSet: 715 return self._parse_wrapped(super()._parse_alter_table_set) 716 717 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 718 if self._match(TokenType.MERGE): 719 comments = self._prev_comments 720 merge = self._parse_merge() 721 merge.add_comments(comments, prepend=True) 722 return merge 723 724 return super()._parse_wrapped_select(table=table) 725 726 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 727 # We want to use _parse_types() if the first token after :: is a known type, 728 # otherwise we could parse something like x::varchar(max) into a function 729 if self._match_set(self.TYPE_TOKENS, advance=False): 730 return self._parse_types() 731 732 return self._parse_function() or self._parse_types() 733 734 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 735 if not self._match(TokenType.OPTION): 736 return None 737 738 def _parse_option() -> t.Optional[exp.Expression]: 739 option = 
    # NOTE(review): the header of the enclosing method lies outside this chunk;
    # only the tail of its nested `_parse_option` helper (and the final return)
    # is visible here. `option` is presumably the result of the
    # `_parse_var_from_options(OPTIONS)` call — confirm against the full file.
            self._parse_var_from_options(OPTIONS)
            if not option:
                return None

            self._match(TokenType.EQ)
            return self.expression(
                exp.QueryOption, this=option, expression=self._parse_primary_or_var()
            )

        return self._parse_wrapped_csv(_parse_option)

    def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption:
        """Parse a single FOR XML option: a name optionally followed by ('string')."""
        this = self._parse_primary_or_var()
        if self._match(TokenType.L_PAREN, advance=False):
            expression = self._parse_wrapped(self._parse_string)
        else:
            expression = None

        return exp.XMLKeyValueOption(this=this, expression=expression)

    def _parse_for(self) -> t.Optional[t.List[exp.Expression]]:
        """Parse T-SQL's `FOR XML <option> [, ...]` query clause, if present."""
        if not self._match_pair(TokenType.FOR, TokenType.XML):
            return None

        def _parse_for_xml() -> t.Optional[exp.Expression]:
            # Known XML options are matched first; anything else falls back to
            # a generic key/value option.
            return self.expression(
                exp.QueryOption,
                this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False)
                or self._parse_xml_key_value_option(),
            )

        return self._parse_csv(_parse_for_xml)

    def _parse_projections(self) -> t.List[exp.Expression]:
        """
        T-SQL supports the syntax alias = expression in the SELECT's projection list,
        so we transform all parsed Selects to convert their EQ projections into Aliases.

        See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
        """
        return [
            (
                exp.alias_(projection.expression, projection.this.this, copy=False)
                if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                else projection
            )
            for projection in super()._parse_projections()
        ]

    def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
        """Applies to SQL Server and Azure SQL Database
        COMMIT [ { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable ] ]
            [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

        ROLLBACK { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable
            | savepoint_name | @savepoint_variable ]
        """
        # The COMMIT/ROLLBACK keyword was already consumed by the caller.
        rollback = self._prev.token_type == TokenType.ROLLBACK

        self._match_texts(("TRAN", "TRANSACTION"))
        this = self._parse_id_var()

        if rollback:
            return self.expression(exp.Rollback, this=this)

        durability = None
        if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
            self._match_text_seq("DELAYED_DURABILITY")
            self._match(TokenType.EQ)

            if self._match_text_seq("OFF"):
                durability = False
            else:
                self._match(TokenType.ON)
                durability = True

            self._match_r_paren()

        return self.expression(exp.Commit, this=this, durability=durability)

    def _parse_transaction(self) -> exp.Transaction | exp.Command:
        """Applies to SQL Server and Azure SQL Database
        BEGIN { TRAN | TRANSACTION }
        [ { transaction_name | @tran_name_variable }
        [ WITH MARK [ 'description' ] ]
        ]
        """
        if self._match_texts(("TRAN", "TRANSACTION")):
            transaction = self.expression(exp.Transaction, this=self._parse_id_var())
            if self._match_text_seq("WITH", "MARK"):
                transaction.set("mark", self._parse_string())

            return transaction

        # Not a BEGIN TRAN statement - fall back to an opaque Command.
        return self._parse_as_command(self._prev)

    def _parse_returns(self) -> exp.ReturnsProperty:
        """Parse RETURNS, attaching the optional table variable that precedes TABLE."""
        table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
        returns = super()._parse_returns()
        returns.set("table", table)
        return returns

    def _parse_convert(
        self, strict: bool, safe: t.Optional[bool] = None
    ) -> t.Optional[exp.Expression]:
        """Parse CONVERT/TRY_CONVERT's argument list: (type, expression [, style])."""
        this = self._parse_types()
        self._match(TokenType.COMMA)
        args = [this, *self._parse_csv(self._parse_assignment)]
        convert = exp.Convert.from_arg_list(args)
        convert.set("safe", safe)
        return convert

    def _parse_column_def(
        self, this: t.Optional[exp.Expression], computed_column: bool = True
    ) -> t.Optional[exp.Expression]:
        """Extend column definitions with T-SQL's `= default` and column modes."""
        this = super()._parse_column_def(this=this, computed_column=computed_column)
        if not this:
            return None
        if self._match(TokenType.EQ):
            this.set("default", self._parse_disjunction())
        if self._match_texts(self.COLUMN_DEFINITION_MODES):
            this.set("output", self._prev.text)
        return this

    def _parse_user_defined_function(
        self, kind: t.Optional[TokenType] = None
    ) -> t.Optional[exp.Expression]:
        this = super()._parse_user_defined_function(kind=kind)

        if (
            kind == TokenType.FUNCTION
            or isinstance(this, exp.UserDefinedFunction)
            or self._match(TokenType.ALIAS, advance=False)
        ):
            return this

        # T-SQL procedure parameters may appear without surrounding parentheses,
        # unless a WITH options clause follows immediately.
        if not self._match(TokenType.WITH, advance=False):
            expressions = self._parse_csv(self._parse_function_parameter)
        else:
            expressions = None

        return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

    def _parse_into(self) -> t.Optional[exp.Into]:
        into = super()._parse_into()

        table = isinstance(into, exp.Into) and into.find(exp.Table)
        if isinstance(table, exp.Table):
            table_identifier = table.this
            if table_identifier.args.get("temporary"):
                # Promote the temporary property from the Identifier to the Into expression
                t.cast(exp.Into, into).set("temporary", True)

        return into

    def _parse_id_var(
        self,
        any_token: bool = True,
        tokens: t.Optional[t.Collection[TokenType]] = None,
    ) -> t.Optional[exp.Expression]:
        """Parse an identifier, flagging T-SQL's #temp and ##global-temp prefixes."""
        is_temporary = self._match(TokenType.HASH)
        is_global = is_temporary and self._match(TokenType.HASH)

        this = super()._parse_id_var(any_token=any_token, tokens=tokens)
        if this:
            if is_global:
                this.set("global_", True)
            elif is_temporary:
                this.set("temporary", True)

        return this

    def _parse_create(self) -> exp.Create | exp.Command:
        create = super()._parse_create()

        if isinstance(create, exp.Create):
            # Surface a #temp table name as an explicit TemporaryProperty on the
            # Create node so generators can act on it uniformly.
            table = create.this.this if isinstance(create.this, exp.Schema) else create.this
            if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
                if not create.args.get("properties"):
                    create.set("properties", exp.Properties(expressions=[]))

                create.args["properties"].append("expressions", exp.TemporaryProperty())

        return create

    def _parse_if(self) -> t.Optional[exp.Expression]:
        index = self._index

        # Recognize the `IF OBJECT_ID('...') IS NOT NULL DROP ...` idiom and
        # parse it as DROP ... IF EXISTS; otherwise rewind and parse a plain IF.
        if self._match_text_seq("OBJECT_ID"):
            self._parse_wrapped_csv(self._parse_string)
            if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                return self._parse_drop(exists=True)

        self._retreat(index)
        return super()._parse_if()

    def _parse_unique(self) -> exp.UniqueColumnConstraint:
        # UNIQUE may be followed by a CLUSTERED/NONCLUSTERED index specification.
        if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
            this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
        else:
            this = self._parse_schema(self._parse_id_var(any_token=False))

        return self.expression(exp.UniqueColumnConstraint, this=this)

    def _parse_update(self) -> exp.Update:
        """Parse UPDATE, including a trailing OPTION(...) hints clause."""
        expression = super()._parse_update()
        expression.set("options", self._parse_options())
        return expression

    def _parse_partition(self) -> t.Optional[exp.Partition]:
        """Parse `WITH (PARTITIONS (<n> [TO <m>] [, ...]))`."""
        if not self._match_text_seq("WITH", "(", "PARTITIONS"):
            return None

        def parse_range():
            # A partition spec is either a single value or a `low TO high` range.
            low = self._parse_bitwise()
            high = self._parse_bitwise() if self._match_text_seq("TO") else None

            return (
                self.expression(exp.PartitionRange, this=low, expression=high) if high else low
            )

        partition = self.expression(
            exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
        )

        self._match_r_paren()

        return partition

    def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
        """Parse one `@var [AS] <type|TABLE(...)> [= default]` item of DECLARE."""
        var = self._parse_id_var()
        if not var:
            return None

        self._match(TokenType.ALIAS)
        return self.expression(
            exp.DeclareItem,
            this=var,
            kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(),
            default=self._match(TokenType.EQ) and self._parse_bitwise(),
        )

    def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]:
        expression = super()._parse_alter_table_alter()

        if expression is not None:
            # A collation name parses as a Column; re-wrap it as a Var since
            # it is a bare keyword in T-SQL, not a column reference.
            collation = expression.args.get("collate")
            if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier):
                identifier = collation.this
                collation.set("this", exp.Var(this=identifier.name))

        return expression

    def _parse_primary_key_part(self) -> t.Optional[exp.Expression]:
        # PRIMARY KEY columns may carry ASC/DESC ordering in T-SQL.
        return self._parse_ordered()
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- COLON_PLACEHOLDER_TOKENS
- ARRAY_CONSTRUCTORS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- CAST_COLUMN_OPERATORS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PIPE_SYNTAX_TRANSFORM_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- QUERY_MODIFIER_TOKENS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- WINDOW_EXCLUDE_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- SET_ASSIGNMENT_DELIMITERS
- ANALYZE_STYLES
- ANALYZE_EXPRESSION_PARSERS
- PARTITION_KEYWORDS
- AMBIGUOUS_ALIAS_TOKENS
- OPERATION_MODIFIERS
- RECURSIVE_CTE_SEARCH_KIND
- MODIFIABLES
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- OPTIONAL_ALIAS_TOKEN_CTE
- ALTER_RENAME_REQUIRES_COLUMN
- ALTER_TABLE_PARTITIONS
- JOINS_HAVE_EQUAL_PRECEDENCE
- ZONE_AWARE_TIMESTAMP_CONSTRUCTOR
- MAP_KEYS_ARE_ARBITRARY_EXPRESSIONS
- JSON_EXTRACT_REQUIRES_JSON_EXPRESSION
- ADD_JOIN_ON_TRUE
- SUPPORTS_OMITTED_INTERVAL_SPAN_UNIT
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- parse_set_operation
- build_cast
- errors
- sql
class Generator(generator.Generator):
    """Generates T-SQL from a sqlglot syntax tree."""

    # T-SQL paginates with TOP / OFFSET ... FETCH rather than LIMIT.
    LIMIT_IS_TOP = True
    QUERY_HINTS = False
    RETURNING_END = False
    NVL2_SUPPORTED = False
    ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
    LIMIT_FETCH = "FETCH"
    COMPUTED_COLUMN_WITH_TYPE = False
    CTE_RECURSIVE_KEYWORD_REQUIRED = False
    ENSURE_BOOLS = True
    NULL_ORDERING_SUPPORTED = None
    SUPPORTS_SINGLE_ARG_CONCAT = False
    TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
    SUPPORTS_SELECT_INTO = True
    JSON_PATH_BRACKETED_KEY_SUPPORTED = False
    SUPPORTS_TO_NUMBER = False
    SET_OP_MODIFIERS = False
    COPY_PARAMS_EQ_REQUIRED = True
    PARSE_JSON_NAME = None
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
    ALTER_SET_WRAPPED = True
    ALTER_SET_TYPE = ""

    # Statement types whose nested CTEs must be bubbled up to the top level,
    # since T-SQL only allows a WITH clause at statement scope.
    EXPRESSIONS_WITHOUT_NESTED_CTES = {
        exp.Create,
        exp.Delete,
        exp.Insert,
        exp.Intersect,
        exp.Except,
        exp.Merge,
        exp.Select,
        exp.Subquery,
        exp.Union,
        exp.Update,
    }

    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BOOLEAN: "BIT",
        exp.DataType.Type.DATETIME2: "DATETIME2",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DOUBLE: "FLOAT",
        exp.DataType.Type.INT: "INTEGER",
        exp.DataType.Type.ROWVERSION: "ROWVERSION",
        exp.DataType.Type.TEXT: "VARCHAR(MAX)",
        exp.DataType.Type.TIMESTAMP: "DATETIME2",
        exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
        exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
        exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
        exp.DataType.Type.UTINYINT: "TINYINT",
        exp.DataType.Type.VARIANT: "SQL_VARIANT",
        exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
    }

    # NCHAR/NVARCHAR are native T-SQL types; generate them verbatim.
    TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
    TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.AnyValue: any_value_to_max_sql,
        exp.Atan2: rename_func("ATN2"),
        exp.ArrayToString: rename_func("STRING_AGG"),
        exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
        exp.Ceil: rename_func("CEILING"),
        exp.Chr: rename_func("CHAR"),
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
        exp.CurrentDate: rename_func("GETDATE"),
        exp.CurrentTimestamp: rename_func("GETDATE"),
        exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
        exp.GroupConcat: _string_agg_sql,
        exp.If: rename_func("IIF"),
        exp.JSONExtract: _json_extract_sql,
        exp.JSONExtractScalar: _json_extract_sql,
        exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
        exp.Ln: rename_func("LOG"),
        exp.Max: max_or_greatest,
        exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
        exp.Min: min_or_least,
        exp.NumberToStr: _format_sql,
        exp.Repeat: rename_func("REPLICATE"),
        exp.CurrentSchema: rename_func("SCHEMA_NAME"),
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_distinct_on,
                transforms.eliminate_semi_and_anti_joins,
                transforms.eliminate_qualify,
                transforms.unnest_generate_date_array_using_recursive_cte,
            ]
        ),
        exp.Stddev: rename_func("STDEV"),
        exp.StrPosition: lambda self, e: strposition_sql(
            self, e, func_name="CHARINDEX", supports_position=True
        ),
        exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
        exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
        exp.SHA1Digest: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string("SHA1"), e.this
        ),
        exp.SHA2: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
        ),
        exp.TemporaryProperty: lambda self, e: "",
        exp.TimeStrToTime: _timestrtotime_sql,
        exp.TimeToStr: _format_sql,
        exp.Trim: trim_sql,
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
        exp.Uuid: lambda *_: "NEWID()",
        exp.DateFromParts: rename_func("DATEFROMPARTS"),
    }

    # RETURNS has bespoke handling in returnsproperty_sql below.
    TRANSFORMS.pop(exp.ReturnsProperty)

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    def scope_resolution(self, rhs: str, scope_name: str) -> str:
        return f"{scope_name}::{rhs}"

    def select_sql(self, expression: exp.Select) -> str:
        limit = expression.args.get("limit")
        offset = expression.args.get("offset")

        if isinstance(limit, exp.Fetch) and not offset:
            # Dialects like Oracle can FETCH directly from a row set but
            # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
            offset = exp.Offset(expression=exp.Literal.number(0))
            expression.set("offset", offset)

        if offset:
            if not expression.args.get("order"):
                # ORDER BY is required in order to use OFFSET in a query, so we use
                # a noop order by, since we don't really care about the order.
                # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                # we replace here because otherwise TOP would be generated in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

        return super().select_sql(expression)

    def convert_sql(self, expression: exp.Convert) -> str:
        # TRY_CONVERT returns NULL on failure instead of raising.
        name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
        return self.func(
            name, expression.this, expression.expression, expression.args.get("style")
        )

    def queryoption_sql(self, expression: exp.QueryOption) -> str:
        option = self.sql(expression, "this")
        value = self.sql(expression, "expression")
        if value:
            # Some OPTION(...) hints require `NAME = value`, others just `NAME value`.
            optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
            return f"{option} {optional_equal_sign}{value}"
        return option

    def lateral_op(self, expression: exp.Lateral) -> str:
        cross_apply = expression.args.get("cross_apply")
        if cross_apply is True:
            return "CROSS APPLY"
        if cross_apply is False:
            return "OUTER APPLY"

        # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
        self.unsupported("LATERAL clause is not supported.")
        return "LATERAL"

    def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
        this = expression.this
        split_count = len(this.name.split("."))
        delimiter = expression.args.get("delimiter")
        part_index = expression.args.get("part_index")

        # PARSENAME only splits dotted object names of up to 4 parts, and it
        # indexes from the right, hence the index arithmetic below.
        if (
            not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
            or (delimiter and delimiter.name != ".")
            or not part_index
            or split_count > 4
        ):
            self.unsupported(
                "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
            )
            return ""

        return self.func(
            "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
        )

    def extract_sql(self, expression: exp.Extract) -> str:
        part = expression.this
        # Map generic date parts back to their T-SQL DATEPART spellings.
        name = DATE_PART_UNMAPPING.get(part.name.upper()) or part

        return self.func("DATEPART", name, expression.expression)

    def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

        # TIMEFROMPARTS requires all five arguments, so default the optional ones.
        if expression.args.get("fractions") is None:
            expression.set("fractions", exp.Literal.number(0))
        if expression.args.get("precision") is None:
            expression.set("precision", exp.Literal.number(0))

        return rename_func("TIMEFROMPARTS")(self, expression)

    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
        zone = expression.args.get("zone")
        if zone is not None:
            zone.pop()
            self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

        # DATETIMEFROMPARTS requires the milliseconds argument.
        if expression.args.get("milli") is None:
            expression.set("milli", exp.Literal.number(0))

        return rename_func("DATETIMEFROMPARTS")(self, expression)

    def setitem_sql(self, expression: exp.SetItem) -> str:
        this = expression.this
        if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
            # T-SQL does not use '=' in SET command, except when the LHS is a variable.
            return f"{self.sql(this.left)} {self.sql(this.right)}"

        return super().setitem_sql(expression)

    def boolean_sql(self, expression: exp.Boolean) -> str:
        # T-SQL has no boolean literals: use bare 1/0 in BIT contexts and
        # tautological comparisons elsewhere (e.g. in a WHERE clause).
        if type(expression.parent) in BIT_TYPES or isinstance(
            expression.find_ancestor(exp.Values, exp.Select), exp.Values
        ):
            return "1" if expression.this else "0"

        return "(1 = 1)" if expression.this else "(1 = 0)"

    def is_sql(self, expression: exp.Is) -> str:
        # `x IS TRUE/FALSE` must become `x = 1/0` since BIT is not a boolean.
        if isinstance(expression.expression, exp.Boolean):
            return self.binary(expression, "=")
        return self.binary(expression, "IS")

    def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
        sql = self.sql(expression, "this")
        properties = expression.args.get("properties")

        # Temporary tables are spelled with a '#' prefix on the name.
        if sql[:1] != "#" and any(
            isinstance(prop, exp.TemporaryProperty)
            for prop in (properties.expressions if properties else [])
        ):
            sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

        return sql

    def create_sql(self, expression: exp.Create) -> str:
        kind = expression.kind
        exists = expression.args.get("exists")
        expression.set("exists", None)

        like_property = expression.find(exp.LikeProperty)
        if like_property:
            ctas_expression = like_property.this
        else:
            ctas_expression = expression.expression

        if kind == "VIEW":
            expression.this.set("catalog", None)
            with_ = expression.args.get("with_")
            if ctas_expression and with_:
                # We've already preprocessed the Create expression to bubble up any nested CTEs,
                # but CREATE VIEW actually requires the WITH clause to come after it so we need
                # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                ctas_expression.set("with_", with_.pop())

        table = expression.find(exp.Table)

        # Convert CTAS statement to SELECT .. INTO ..
        if kind == "TABLE" and ctas_expression:
            if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                ctas_expression = ctas_expression.subquery()

            properties = expression.args.get("properties") or exp.Properties()
            is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

            select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
            select_into.set("into", exp.Into(this=table, temporary=is_temp))

            if like_property:
                # CREATE ... LIKE copies only the schema, so select zero rows.
                select_into.limit(0, copy=False)

            sql = self.sql(select_into)
        else:
            sql = super().create_sql(expression)

        if exists:
            # Emulate IF NOT EXISTS by probing the catalog views and EXEC'ing
            # the statement as a string literal.
            identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
            sql_with_ctes = self.prepend_ctes(expression, sql)
            sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
            if kind == "SCHEMA":
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
            elif kind == "TABLE":
                assert table
                where = exp.and_(
                    exp.column("TABLE_NAME").eq(table.name),
                    exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                    exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
                )
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
            elif kind == "INDEX":
                index = self.sql(exp.Literal.string(expression.this.text("this")))
                return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
        elif expression.args.get("replace"):
            sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

        return self.prepend_ctes(expression, sql)

    @generator.unsupported_args("unlogged", "expressions")
    def into_sql(self, expression: exp.Into) -> str:
        if expression.args.get("temporary"):
            # If the Into expression has a temporary property, push this down to the Identifier
            table = expression.find(exp.Table)
            if table and isinstance(table.this, exp.Identifier):
                table.this.set("temporary", True)

        return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

    def count_sql(self, expression: exp.Count) -> str:
        func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
        return rename_func(func_name)(self, expression)

    def datediff_sql(self, expression: exp.DateDiff) -> str:
        func_name = "DATEDIFF_BIG" if expression.args.get("big_int") else "DATEDIFF"
        return date_delta_sql(func_name)(self, expression)

    def offset_sql(self, expression: exp.Offset) -> str:
        # T-SQL requires the ROWS keyword after OFFSET <n>.
        return f"{super().offset_sql(expression)} ROWS"

    def version_sql(self, expression: exp.Version) -> str:
        """Render a temporal-table `FOR SYSTEM_TIME ...` clause."""
        name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
        this = f"FOR {name}"
        expr = expression.expression
        kind = expression.text("kind")
        if kind in ("FROM", "BETWEEN"):
            args = expr.expressions
            sep = "TO" if kind == "FROM" else "AND"
            expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
        else:
            expr_sql = self.sql(expr)

        expr_sql = f" {expr_sql}" if expr_sql else ""
        return f"{this} {kind}{expr_sql}"

    def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
        table = expression.args.get("table")
        table = f"{table} " if table else ""
        return f"RETURNS {table}{self.sql(expression, 'this')}"

    def returning_sql(self, expression: exp.Returning) -> str:
        # T-SQL spells RETURNING as OUTPUT [... INTO target].
        into = self.sql(expression, "into")
        into = self.seg(f"INTO {into}") if into else ""
        return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

    def transaction_sql(self, expression: exp.Transaction) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        mark = self.sql(expression, "mark")
        mark = f" WITH MARK {mark}" if mark else ""
        return f"BEGIN TRANSACTION{this}{mark}"

    def commit_sql(self, expression: exp.Commit) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        durability = expression.args.get("durability")
        durability = (
            f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
            if durability is not None
            else ""
        )
        return f"COMMIT TRANSACTION{this}{durability}"

    def rollback_sql(self, expression: exp.Rollback) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"ROLLBACK TRANSACTION{this}"

    def identifier_sql(self, expression: exp.Identifier) -> str:
        identifier = super().identifier_sql(expression)

        # Re-apply the temp-table prefixes captured by the parser.
        if expression.args.get("global_"):
            identifier = f"##{identifier}"
        elif expression.args.get("temporary"):
            identifier = f"#{identifier}"

        return identifier

    def constraint_sql(self, expression: exp.Constraint) -> str:
        this = self.sql(expression, "this")
        expressions = self.expressions(expression, flat=True, sep=" ")
        return f"CONSTRAINT {this} {expressions}"

    def length_sql(self, expression: exp.Length) -> str:
        return self._uncast_text(expression, "LEN")

    def right_sql(self, expression: exp.Right) -> str:
        return self._uncast_text(expression, "RIGHT")

    def left_sql(self, expression: exp.Left) -> str:
        return self._uncast_text(expression, "LEFT")

    def _uncast_text(self, expression: exp.Expression, name: str) -> str:
        # Drop a redundant CAST(... AS TEXT) wrapper: these string functions
        # already operate on character data.
        this = expression.this
        if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
            this_sql = self.sql(this, "this")
        else:
            this_sql = self.sql(this)
        expression_sql = self.sql(expression, "expression")
        return self.func(name, this_sql, expression_sql if expression_sql else None)

    def partition_sql(self, expression: exp.Partition) -> str:
        return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

    def alter_sql(self, expression: exp.Alter) -> str:
        # Renames use the sp_rename stored procedure rather than ALTER ... RENAME.
        action = seq_get(expression.args.get("actions") or [], 0)
        if isinstance(action, exp.AlterRename):
            return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
        return super().alter_sql(expression)

    def drop_sql(self, expression: exp.Drop) -> str:
        # Views cannot be qualified with a catalog in DROP VIEW.
        if expression.args["kind"] == "VIEW":
            expression.this.set("catalog", None)
        return super().drop_sql(expression)

    def options_modifier(self, expression: exp.Expression) -> str:
        options = self.expressions(expression, key="options")
        return f" OPTION{self.wrap(options)}" if options else ""

    def dpipe_sql(self, expression: exp.DPipe) -> str:
        # T-SQL concatenates strings with '+', not '||'.
        return self.sql(
            reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
        )

    def isascii_sql(self, expression: exp.IsAscii) -> str:
        # True when no character outside the ASCII range matches; the hex blob
        # decodes to the pattern '%[^ -~]%' under a binary collation.
        return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

    def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
        this = super().columndef_sql(expression, sep)
        default = self.sql(expression, "default")
        default = f" = {default}" if default else ""
        output = self.sql(expression, "output")
        output = f" {output}" if output else ""
        return f"{this}{default}{output}"

    def coalesce_sql(self, expression: exp.Coalesce) -> str:
        # Two-argument coalesces parsed from ISNULL round-trip back to ISNULL.
        func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
        return rename_func(func_name)(self, expression)
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True: Always quote except for specials cases. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the
indentation of subqueries and filters under a
WHERE clause. Default: 2.
- normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
1128 def select_sql(self, expression: exp.Select) -> str: 1129 limit = expression.args.get("limit") 1130 offset = expression.args.get("offset") 1131 1132 if isinstance(limit, exp.Fetch) and not offset: 1133 # Dialects like Oracle can FETCH directly from a row set but 1134 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1135 offset = exp.Offset(expression=exp.Literal.number(0)) 1136 expression.set("offset", offset) 1137 1138 if offset: 1139 if not expression.args.get("order"): 1140 # ORDER BY is required in order to use OFFSET in a query, so we use 1141 # a noop order by, since we don't really care about the order. 1142 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1143 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1144 1145 if isinstance(limit, exp.Limit): 1146 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1147 # we replace here because otherwise TOP would be generated in select_sql 1148 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1149 1150 return super().select_sql(expression)
1158 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1159 option = self.sql(expression, "this") 1160 value = self.sql(expression, "expression") 1161 if value: 1162 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1163 return f"{option} {optional_equal_sign}{value}" 1164 return option
1166 def lateral_op(self, expression: exp.Lateral) -> str: 1167 cross_apply = expression.args.get("cross_apply") 1168 if cross_apply is True: 1169 return "CROSS APPLY" 1170 if cross_apply is False: 1171 return "OUTER APPLY" 1172 1173 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1174 self.unsupported("LATERAL clause is not supported.") 1175 return "LATERAL"
1177 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1178 this = expression.this 1179 split_count = len(this.name.split(".")) 1180 delimiter = expression.args.get("delimiter") 1181 part_index = expression.args.get("part_index") 1182 1183 if ( 1184 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1185 or (delimiter and delimiter.name != ".") 1186 or not part_index 1187 or split_count > 4 1188 ): 1189 self.unsupported( 1190 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1191 ) 1192 return "" 1193 1194 return self.func( 1195 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1196 )
1204 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1205 nano = expression.args.get("nano") 1206 if nano is not None: 1207 nano.pop() 1208 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1209 1210 if expression.args.get("fractions") is None: 1211 expression.set("fractions", exp.Literal.number(0)) 1212 if expression.args.get("precision") is None: 1213 expression.set("precision", exp.Literal.number(0)) 1214 1215 return rename_func("TIMEFROMPARTS")(self, expression)
def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
    """Render DATETIMEFROMPARTS, dropping args T-SQL cannot express."""
    # DATETIMEFROMPARTS supports neither a time zone nor nanosecond precision.
    for arg_name, message in (
        ("zone", "Time zone is not supported in DATETIMEFROMPARTS."),
        ("nano", "Specifying nanoseconds is not supported in DATETIMEFROMPARTS."),
    ):
        unsupported_arg = expression.args.get(arg_name)
        if unsupported_arg is not None:
            unsupported_arg.pop()
            self.unsupported(message)

    # The milliseconds argument is mandatory; default it to 0.
    if expression.args.get("milli") is None:
        expression.set("milli", exp.Literal.number(0))

    return rename_func("DATETIMEFROMPARTS")(self, expression)
def setitem_sql(self, expression: exp.SetItem) -> str:
    """Render a SET item: T-SQL omits '=' unless assigning to a @variable."""
    assignment = expression.this
    if not isinstance(assignment, exp.EQ) or isinstance(assignment.left, exp.Parameter):
        return super().setitem_sql(expression)
    # Drop the '=' between the setting name and its value.
    return f"{self.sql(assignment.left)} {self.sql(assignment.right)}"
def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    """Render the CREATE target, prefixing '#' for temporary tables."""
    name_sql = self.sql(expression, "this")
    properties = expression.args.get("properties")
    prop_exprs = properties.expressions if properties else []
    is_temporary = any(isinstance(prop, exp.TemporaryProperty) for prop in prop_exprs)

    if is_temporary and not name_sql.startswith("#"):
        # Keep the '#' inside the brackets when the identifier is quoted.
        if name_sql.startswith("["):
            name_sql = f"[#{name_sql[1:]}"
        else:
            name_sql = f"#{name_sql}"

    return name_sql
def create_sql(self, expression: exp.Create) -> str:
    """Generate a CREATE statement with T-SQL specific rewrites.

    Handles three T-SQL quirks:
    - CTAS (``CREATE TABLE ... AS SELECT``) is rewritten to ``SELECT ... INTO``.
    - ``IF NOT EXISTS`` has no direct syntax; it becomes an ``IF NOT EXISTS
      (SELECT ...) EXEC('<create sql>')`` guard for SCHEMA/TABLE/INDEX.
    - ``CREATE OR REPLACE`` becomes ``CREATE OR ALTER``.
    """
    kind = expression.kind
    # Pop IF NOT EXISTS up front so the inner CREATE renders without it;
    # we re-express it below as an IF NOT EXISTS (...) EXEC(...) wrapper.
    exists = expression.args.get("exists")
    expression.set("exists", None)

    # CREATE ... LIKE is emulated via SELECT * INTO ... LIMIT 0 from the source.
    like_property = expression.find(exp.LikeProperty)
    if like_property:
        ctas_expression = like_property.this
    else:
        ctas_expression = expression.expression

    if kind == "VIEW":
        # T-SQL views cannot be qualified with a catalog (database) name.
        expression.this.set("catalog", None)
        with_ = expression.args.get("with_")
        if ctas_expression and with_:
            # We've already preprocessed the Create expression to bubble up any nested CTEs,
            # but CREATE VIEW actually requires the WITH clause to come after it so we need
            # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
            ctas_expression.set("with_", with_.pop())

    table = expression.find(exp.Table)

    # Convert CTAS statement to SELECT .. INTO ..
    if kind == "TABLE" and ctas_expression:
        if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
            ctas_expression = ctas_expression.subquery()

        properties = expression.args.get("properties") or exp.Properties()
        is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

        select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
        select_into.set("into", exp.Into(this=table, temporary=is_temp))

        if like_property:
            # LIKE only copies the structure, so fetch zero rows.
            select_into.limit(0, copy=False)

        sql = self.sql(select_into)
    else:
        sql = super().create_sql(expression)

    if exists:
        # Wrap the CREATE in a catalog-view existence check and run it via EXEC,
        # since T-SQL has no IF NOT EXISTS clause on CREATE itself.
        identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
        sql_with_ctes = self.prepend_ctes(expression, sql)
        sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
        if kind == "SCHEMA":
            return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
        elif kind == "TABLE":
            assert table
            # Narrow the lookup by schema/catalog only when they are present.
            where = exp.and_(
                exp.column("TABLE_NAME").eq(table.name),
                exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
            )
            return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
        elif kind == "INDEX":
            index = self.sql(exp.Literal.string(expression.this.text("this")))
            return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
    elif expression.args.get("replace"):
        sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

    return self.prepend_ctes(expression, sql)
@generator.unsupported_args("unlogged", "expressions")
def into_sql(self, expression: exp.Into) -> str:
    """Render SELECT ... INTO, pushing temp-ness down to the identifier."""
    if expression.args.get("temporary"):
        # If the Into expression has a temporary property, push this down to
        # the Identifier so identifier rendering can add the '#' prefix.
        target = expression.find(exp.Table)
        if target and isinstance(target.this, exp.Identifier):
            target.this.set("temporary", True)

    return f"{self.seg('INTO')} {self.sql(expression, 'this')}"
def version_sql(self, expression: exp.Version) -> str:
    """Render temporal-table FOR SYSTEM_TIME clauses."""
    name = expression.name
    if name == "TIMESTAMP":
        name = "SYSTEM_TIME"

    kind = expression.text("kind")
    expr = expression.expression

    if kind in ("FROM", "BETWEEN"):
        # Two-operand forms: FROM a TO b / BETWEEN a AND b.
        operands = expr.expressions
        joiner = "TO" if kind == "FROM" else "AND"
        expr_sql = f"{self.sql(seq_get(operands, 0))} {joiner} {self.sql(seq_get(operands, 1))}"
    else:
        expr_sql = self.sql(expr)

    suffix = f" {expr_sql}" if expr_sql else ""
    return f"FOR {name} {kind}{suffix}"
def commit_sql(self, expression: exp.Commit) -> str:
    """Render COMMIT TRANSACTION with an optional DELAYED_DURABILITY hint."""
    name = self.sql(expression, "this")
    name = f" {name}" if name else ""

    durability = expression.args.get("durability")
    if durability is None:
        durability_sql = ""
    else:
        setting = "ON" if durability else "OFF"
        durability_sql = f" WITH (DELAYED_DURABILITY = {setting})"

    return f"COMMIT TRANSACTION{name}{durability_sql}"
def identifier_sql(self, expression: exp.Identifier) -> str:
    """Render an identifier, prefixing '##' for global and '#' for local temps."""
    rendered = super().identifier_sql(expression)

    if expression.args.get("global_"):
        return f"##{rendered}"
    if expression.args.get("temporary"):
        return f"#{rendered}"
    return rendered
def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
    """Render a column/parameter definition with optional `= default` and OUTPUT."""
    parts = [super().columndef_sql(expression, sep)]

    default_sql = self.sql(expression, "default")
    if default_sql:
        parts.append(f"= {default_sql}")

    output_sql = self.sql(expression, "output")
    if output_sql:
        parts.append(output_sql)

    return " ".join(parts)
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- INOUT_SEPARATOR
- JOIN_HINTS
- DIRECTED_JOINS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- SUPPORTS_WINDOW_EXCLUDE
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- UNICODE_SUBSTITUTE
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- SUPPORTS_MEDIAN
- SUPPORTS_UNIX_SECONDS
- NORMALIZE_EXTRACT_DATE_PARTS
- ARRAY_SIZE_NAME
- ARRAY_SIZE_DIM_REQUIRED
- SUPPORTS_BETWEEN_FLAGS
- SUPPORTS_LIKE_QUANTIFIERS
- MATCH_AGAINST_TABLE_PREFIX
- SET_ASSIGNMENT_REQUIRES_VARIABLE_KEYWORD
- UPDATE_STATEMENT_SUPPORTS_FROM
- UNSUPPORTED_TYPES
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS
- SAFE_JSON_PATH_KEY_RE
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- sanitize_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- pseudocolumn_sql
- columnposition_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- inoutcolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- limitoptions_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablefromrows_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- from_sql
- groupingsets_sql
- rollup_sql
- rollupindex_sql
- rollupproperty_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- queryband_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- booland_sql
- boolor_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- for_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- formatphrase_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- strtotime_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterindex_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- altersession_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- addpartition_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- safedivide_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- like_sql
- ilike_sql
- match_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- sub_sql
- trycast_sql
- jsoncast_sql
- try_sql
- log_sql
- use_sql
- binary
- ceil_floor
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- whens_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- uniquekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- generateembedding_sql
- mltranslate_sql
- mlforecast_sql
- featuresattime_sql
- vectorsearch_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodatetime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonextractquote_sql
- jsonexists_sql
- arrayagg_sql
- slice_sql
- apply_sql
- grant_sql
- revoke_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql
- string_sql
- median_sql
- overflowtruncatebehavior_sql
- unixseconds_sql
- arraysize_sql
- attach_sql
- detach_sql
- attachoption_sql
- watermarkcolumnconstraint_sql
- encodeproperty_sql
- includeproperty_sql
- xmlelement_sql
- xmlkeyvalueoption_sql
- partitionbyrangeproperty_sql
- partitionbyrangepropertydynamic_sql
- unpivotcolumns_sql
- analyzesample_sql
- analyzestatistics_sql
- analyzehistogram_sql
- analyzedelete_sql
- analyzelistchainedrows_sql
- analyzevalidate_sql
- analyze_sql
- xmltable_sql
- xmlnamespace_sql
- export_sql
- declare_sql
- declareitem_sql
- recursivewithsearch_sql
- parameterizedagg_sql
- anonymousaggfunc_sql
- combinedaggfunc_sql
- combinedparameterizedagg_sql
- show_sql
- install_sql
- get_put_sql
- translatecharacters_sql
- decodecase_sql
- semanticview_sql
- getextract_sql
- datefromunixdate_sql
- space_sql
- buildproperty_sql
- refreshtriggerproperty_sql
- modelattribute_sql
- directorystage_sql
- uuid_sql
- initcap_sql
- localtime_sql
- localtimestamp_sql
- weekstart_sql
- chr_sql