sqlglot.dialects.tsql
1from __future__ import annotations 2 3import datetime 4import re 5import typing as t 6from functools import reduce 7 8from sqlglot import exp, generator, parser, tokens, transforms 9from sqlglot.dialects.dialect import ( 10 Dialect, 11 NormalizationStrategy, 12 any_value_to_max_sql, 13 build_date_delta, 14 date_delta_sql, 15 datestrtodate_sql, 16 generatedasidentitycolumnconstraint_sql, 17 max_or_greatest, 18 min_or_least, 19 rename_func, 20 strposition_sql, 21 timestrtotime_sql, 22 trim_sql, 23) 24from sqlglot.helper import seq_get 25from sqlglot.parser import build_coalesce 26from sqlglot.time import format_time 27from sqlglot.tokens import TokenType 28 29if t.TYPE_CHECKING: 30 from sqlglot._typing import E 31 32FULL_FORMAT_TIME_MAPPING = { 33 "weekday": "%A", 34 "dw": "%A", 35 "w": "%A", 36 "month": "%B", 37 "mm": "%B", 38 "m": "%B", 39} 40 41DATE_DELTA_INTERVAL = { 42 "year": "year", 43 "yyyy": "year", 44 "yy": "year", 45 "quarter": "quarter", 46 "qq": "quarter", 47 "q": "quarter", 48 "month": "month", 49 "mm": "month", 50 "m": "month", 51 "week": "week", 52 "ww": "week", 53 "wk": "week", 54 "day": "day", 55 "dd": "day", 56 "d": "day", 57} 58 59 60DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})") 61 62# N = Numeric, C=Currency 63TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"} 64 65DEFAULT_START_DATE = datetime.date(1900, 1, 1) 66 67BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias} 68 69# Unsupported options: 70# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] ) 71# - TABLE HINT 72OPTIONS: parser.OPTIONS_TYPE = { 73 **dict.fromkeys( 74 ( 75 "DISABLE_OPTIMIZED_PLAN_FORCING", 76 "FAST", 77 "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX", 78 "LABEL", 79 "MAXDOP", 80 "MAXRECURSION", 81 "MAX_GRANT_PERCENT", 82 "MIN_GRANT_PERCENT", 83 "NO_PERFORMANCE_SPOOL", 84 "QUERYTRACEON", 85 "RECOMPILE", 86 ), 87 tuple(), 88 ), 89 "CONCAT": ("UNION",), 90 "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"), 
91 "EXPAND": ("VIEWS",), 92 "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"), 93 "HASH": ("GROUP", "JOIN", "UNION"), 94 "KEEP": ("PLAN",), 95 "KEEPFIXED": ("PLAN",), 96 "LOOP": ("JOIN",), 97 "MERGE": ("JOIN", "UNION"), 98 "OPTIMIZE": (("FOR", "UNKNOWN"),), 99 "ORDER": ("GROUP",), 100 "PARAMETERIZATION": ("FORCED", "SIMPLE"), 101 "ROBUST": ("PLAN",), 102 "USE": ("PLAN",), 103} 104 105 106XML_OPTIONS: parser.OPTIONS_TYPE = { 107 **dict.fromkeys( 108 ( 109 "AUTO", 110 "EXPLICIT", 111 "TYPE", 112 ), 113 tuple(), 114 ), 115 "ELEMENTS": ( 116 "XSINIL", 117 "ABSENT", 118 ), 119 "BINARY": ("BASE64",), 120} 121 122 123OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL") 124 125 126def _build_formatted_time( 127 exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None 128) -> t.Callable[[t.List], E]: 129 def _builder(args: t.List) -> E: 130 fmt = seq_get(args, 0) 131 if isinstance(fmt, exp.Expression): 132 fmt = exp.Literal.string( 133 format_time( 134 fmt.name.lower(), 135 ( 136 {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING} 137 if full_format_mapping 138 else TSQL.TIME_MAPPING 139 ), 140 ) 141 ) 142 143 this = seq_get(args, 1) 144 if isinstance(this, exp.Expression): 145 this = exp.cast(this, exp.DataType.Type.DATETIME2) 146 147 return exp_class(this=this, format=fmt) 148 149 return _builder 150 151 152def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr: 153 this = seq_get(args, 0) 154 fmt = seq_get(args, 1) 155 culture = seq_get(args, 2) 156 157 number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name)) 158 159 if number_fmt: 160 return exp.NumberToStr(this=this, format=fmt, culture=culture) 161 162 if fmt: 163 fmt = exp.Literal.string( 164 format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING) 165 if len(fmt.name) == 1 166 else format_time(fmt.name, TSQL.TIME_MAPPING) 167 ) 168 169 return exp.TimeToStr(this=this, format=fmt, culture=culture) 170 171 172def 
_build_eomonth(args: t.List) -> exp.LastDay: 173 date = exp.TsOrDsToDate(this=seq_get(args, 0)) 174 month_lag = seq_get(args, 1) 175 176 if month_lag is None: 177 this: exp.Expression = date 178 else: 179 unit = DATE_DELTA_INTERVAL.get("month") 180 this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit)) 181 182 return exp.LastDay(this=this) 183 184 185def _build_hashbytes(args: t.List) -> exp.Expression: 186 kind, data = args 187 kind = kind.name.upper() if kind.is_string else "" 188 189 if kind == "MD5": 190 args.pop(0) 191 return exp.MD5(this=data) 192 if kind in ("SHA", "SHA1"): 193 args.pop(0) 194 return exp.SHA(this=data) 195 if kind == "SHA2_256": 196 return exp.SHA2(this=data, length=exp.Literal.number(256)) 197 if kind == "SHA2_512": 198 return exp.SHA2(this=data, length=exp.Literal.number(512)) 199 200 return exp.func("HASHBYTES", *args) 201 202 203DATEPART_ONLY_FORMATS = {"DW", "WK", "HOUR", "QUARTER"} 204 205 206def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str: 207 fmt = expression.args["format"] 208 209 if not isinstance(expression, exp.NumberToStr): 210 if fmt.is_string: 211 mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING) 212 213 name = (mapped_fmt or "").upper() 214 if name in DATEPART_ONLY_FORMATS: 215 return self.func("DATEPART", name, expression.this) 216 217 fmt_sql = self.sql(exp.Literal.string(mapped_fmt)) 218 else: 219 fmt_sql = self.format_time(expression) or self.sql(fmt) 220 else: 221 fmt_sql = self.sql(fmt) 222 223 return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture")) 224 225 226def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str: 227 this = expression.this 228 distinct = expression.find(exp.Distinct) 229 if distinct: 230 # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression 231 self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.") 232 this = distinct.pop().expressions[0] 
def _build_date_delta(
    exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None
) -> t.Callable[[t.List], E]:
    """Build a parser for DATEDIFF-style functions.

    Normalizes the unit through ``unit_mapping`` and converts integer start
    dates (interpreted as day offsets from 1900-01-01) into date strings.
    """

    def _builder(args: t.List) -> E:
        unit = seq_get(args, 0)
        if unit and unit_mapping:
            unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name))

        start_date = seq_get(args, 1)
        if start_date and start_date.is_number:
            # Numeric types are valid DATETIME values
            if start_date.is_int:
                adds = DEFAULT_START_DATE + datetime.timedelta(days=start_date.to_py())
                start_date = exp.Literal.string(adds.strftime("%F"))
            else:
                # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs.
                # This is not a problem when generating T-SQL code, it is when transpiling to other dialects.
                return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit)

        return exp_class(
            this=exp.TimeStrToTime(this=seq_get(args, 2)),
            expression=exp.TimeStrToTime(this=start_date),
            unit=unit,
        )

    return _builder


def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression


# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts:
    """Parse DATETIMEFROMPARTS(year, month, day, hour, minute, seconds, milliseconds)."""
    return exp.TimestampFromParts(
        year=seq_get(args, 0),
        month=seq_get(args, 1),
        day=seq_get(args, 2),
        hour=seq_get(args, 3),
        min=seq_get(args, 4),
        sec=seq_get(args, 5),
        milli=seq_get(args, 6),
    )


# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_timefromparts(args: t.List) -> exp.TimeFromParts:
    """Parse TIMEFROMPARTS(hour, minute, seconds, fractions, precision)."""
    return exp.TimeFromParts(
        hour=seq_get(args, 0),
        min=seq_get(args, 1),
        sec=seq_get(args, 2),
        fractions=seq_get(args, 3),
        precision=seq_get(args, 4),
    )


def _build_with_arg_as_text(
    klass: t.Type[exp.Expression],
) -> t.Callable[[t.List[exp.Expression]], exp.Expression]:
    """Build a parser that casts a non-string first argument to TEXT (for LEN/LEFT/RIGHT)."""

    def _parse(args: t.List[exp.Expression]) -> exp.Expression:
        this = seq_get(args, 0)

        if this and not this.is_string:
            this = exp.cast(this, exp.DataType.Type.TEXT)

        expression = seq_get(args, 1)
        kwargs = {"this": this}

        if expression:
            kwargs["expression"] = expression

        return klass(**kwargs)

    return _parse


# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16
def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous:
    # PARSENAME(...) will be stored into exp.SplitPart if:
    # - All args are literals
    # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL)
    if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args):
        this = args[0]
        part_index = args[1]
        split_count = len(this.name.split("."))
        if split_count <= 4:
            # PARSENAME counts parts right-to-left; SplitPart counts left-to-right.
            return exp.SplitPart(
                this=this,
                delimiter=exp.Literal.string("."),
                part_index=exp.Literal.number(split_count + 1 - part_index.to_py()),
            )

    return exp.Anonymous(this="PARSENAME", expressions=args)


def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract:
    if len(args) == 1:
        # The default value for path is '$'. As a result, if you don't provide a
        # value for path, JSON_QUERY returns the input expression.
        args.append(exp.Literal.string("$"))

    return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect)
def _json_extract_sql(
    self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar
) -> str:
    """Generate ISNULL(JSON_QUERY(...), JSON_VALUE(...)) so both object/array
    and scalar JSON values are covered."""
    json_query = self.func("JSON_QUERY", expression.this, expression.expression)
    json_value = self.func("JSON_VALUE", expression.this, expression.expression)
    return self.func("ISNULL", json_query, json_value)


def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime):
    sql = timestrtotime_sql(self, expression)
    if expression.args.get("zone"):
        # If there is a timezone, produce an expression like:
        # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC'
        # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information
        return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC")))
    return sql


def _build_datetrunc(args: t.List) -> exp.TimestampTrunc:
    """Parse DATETRUNC(unit, value), casting string values to DATETIME2."""
    unit = seq_get(args, 0)
    this = seq_get(args, 1)

    if this and this.is_string:
        this = exp.cast(this, exp.DataType.Type.DATETIME2)

    return exp.TimestampTrunc(this=this, unit=unit)


class TSQL(Dialect):
    SUPPORTS_SEMI_ANTI_JOIN = False
    LOG_BASE_FIRST = False
    TYPED_DIVISION = True
    CONCAT_COALESCE = True
    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE
    ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False

    TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"

    TIME_MAPPING = {
        "year": "%Y",
        "dayofyear": "%j",
        "day": "%d",
        "dy": "%d",
        "y": "%Y",
        "week": "%W",
        "ww": "%W",
        "wk": "%W",
        "hour": "%h",
        "hh": "%I",
        "minute": "%M",
        "mi": "%M",
        "n": "%M",
        "second": "%S",
        "ss": "%S",
        "s": "%-S",
        "millisecond": "%f",
        "ms": "%f",
        "weekday": "%w",
        "dw": "%w",
        "month": "%m",
        "mm": "%M",
        "m": "%-M",
        "Y": "%Y",
        "YYYY": "%Y",
        "YY": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dddd": "%A",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "h": "%-I",
        "ffffff": "%f",
        "yyyy": "%Y",
        "yy": "%y",
    }

    # CONVERT() style codes -> strftime-like formats.
    CONVERT_FORMAT_MAPPING = {
        "0": "%b %d %Y %-I:%M%p",
        "1": "%m/%d/%y",
        "2": "%y.%m.%d",
        "3": "%d/%m/%y",
        "4": "%d.%m.%y",
        "5": "%d-%m-%y",
        "6": "%d %b %y",
        "7": "%b %d, %y",
        "8": "%H:%M:%S",
        "9": "%b %d %Y %-I:%M:%S:%f%p",
        "10": "mm-dd-yy",
        "11": "yy/mm/dd",
        "12": "yymmdd",
        "13": "%d %b %Y %H:%M:ss:%f",
        "14": "%H:%M:%S:%f",
        "20": "%Y-%m-%d %H:%M:%S",
        "21": "%Y-%m-%d %H:%M:%S.%f",
        "22": "%m/%d/%y %-I:%M:%S %p",
        "23": "%Y-%m-%d",
        "24": "%H:%M:%S",
        "25": "%Y-%m-%d %H:%M:%S.%f",
        "100": "%b %d %Y %-I:%M%p",
        "101": "%m/%d/%Y",
        "102": "%Y.%m.%d",
        "103": "%d/%m/%Y",
        "104": "%d.%m.%Y",
        "105": "%d-%m-%Y",
        "106": "%d %b %Y",
        "107": "%b %d, %Y",
        "108": "%H:%M:%S",
        "109": "%b %d %Y %-I:%M:%S:%f%p",
        "110": "%m-%d-%Y",
        "111": "%Y/%m/%d",
        "112": "%Y%m%d",
        "113": "%d %b %Y %H:%M:%S:%f",
        "114": "%H:%M:%S:%f",
        "120": "%Y-%m-%d %H:%M:%S",
        "121": "%Y-%m-%d %H:%M:%S.%f",
        "126": "%Y-%m-%dT%H:%M:%S.%f",
    }

    # Single-letter .NET standard format specifiers used by FORMAT().
    FORMAT_TIME_MAPPING = {
        "y": "%B %Y",
        "d": "%m/%d/%Y",
        "H": "%-H",
        "h": "%-I",
        "s": "%Y-%m-%d %H:%M:%S",
        "D": "%A,%B,%Y",
        "f": "%A,%B,%Y %-I:%M %p",
        "F": "%A,%B,%Y %-I:%M:%S %p",
        "g": "%m/%d/%Y %-I:%M %p",
        "G": "%m/%d/%Y %-I:%M:%S %p",
        "M": "%B %-d",
        "m": "%B %-d",
        "O": "%Y-%m-%dT%H:%M:%S",
        "u": "%Y-%M-%D %H:%M:%S%z",
        "U": "%A, %B %D, %Y %H:%M:%S%z",
        "T": "%-I:%M:%S %p",
        "t": "%-I:%M",
        "Y": "%a %Y",
    }

    class Tokenizer(tokens.Tokenizer):
        IDENTIFIERS = [("[", "]"), '"']
        QUOTES = ["'", '"']
        HEX_STRINGS = [("0x", ""), ("0X", "")]
        VAR_SINGLE_TOKENS = {"@", "$", "#"}

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "CLUSTERED INDEX": TokenType.INDEX,
            "DATETIME2": TokenType.DATETIME2,
            "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
            "DECLARE": TokenType.DECLARE,
            "EXEC": TokenType.COMMAND,
            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
            "GO": TokenType.COMMAND,
            "IMAGE": TokenType.IMAGE,
            "MONEY": TokenType.MONEY,
            "NONCLUSTERED INDEX": TokenType.INDEX,
            "NTEXT": TokenType.TEXT,
            "OPTION": TokenType.OPTION,
            "OUTPUT": TokenType.RETURNING,
            "PRINT": TokenType.COMMAND,
            "PROC": TokenType.PROCEDURE,
            "REAL": TokenType.FLOAT,
            "ROWVERSION": TokenType.ROWVERSION,
            "SMALLDATETIME": TokenType.SMALLDATETIME,
            "SMALLMONEY": TokenType.SMALLMONEY,
            "SQL_VARIANT": TokenType.VARIANT,
            "SYSTEM_USER": TokenType.CURRENT_USER,
            "TOP": TokenType.TOP,
            "TIMESTAMP": TokenType.ROWVERSION,
            "TINYINT": TokenType.UTINYINT,
            "UNIQUEIDENTIFIER": TokenType.UUID,
            "UPDATE STATISTICS": TokenType.COMMAND,
            "XML": TokenType.XML,
        }
        # T-SQL has no hint comments of the form /*+ ... */.
        KEYWORDS.pop("/*+")

        COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
VAR_SINGLE_TOKENS = {"@", "$", "#"} 525 526 KEYWORDS = { 527 **tokens.Tokenizer.KEYWORDS, 528 "CLUSTERED INDEX": TokenType.INDEX, 529 "DATETIME2": TokenType.DATETIME2, 530 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 531 "DECLARE": TokenType.DECLARE, 532 "EXEC": TokenType.COMMAND, 533 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 534 "GO": TokenType.COMMAND, 535 "IMAGE": TokenType.IMAGE, 536 "MONEY": TokenType.MONEY, 537 "NONCLUSTERED INDEX": TokenType.INDEX, 538 "NTEXT": TokenType.TEXT, 539 "OPTION": TokenType.OPTION, 540 "OUTPUT": TokenType.RETURNING, 541 "PRINT": TokenType.COMMAND, 542 "PROC": TokenType.PROCEDURE, 543 "REAL": TokenType.FLOAT, 544 "ROWVERSION": TokenType.ROWVERSION, 545 "SMALLDATETIME": TokenType.SMALLDATETIME, 546 "SMALLMONEY": TokenType.SMALLMONEY, 547 "SQL_VARIANT": TokenType.VARIANT, 548 "SYSTEM_USER": TokenType.CURRENT_USER, 549 "TOP": TokenType.TOP, 550 "TIMESTAMP": TokenType.ROWVERSION, 551 "TINYINT": TokenType.UTINYINT, 552 "UNIQUEIDENTIFIER": TokenType.UUID, 553 "UPDATE STATISTICS": TokenType.COMMAND, 554 "XML": TokenType.XML, 555 } 556 KEYWORDS.pop("/*+") 557 558 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 559 560 class Parser(parser.Parser): 561 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 562 LOG_DEFAULTS_TO_LN = True 563 STRING_ALIASES = True 564 NO_PAREN_IF_COMMANDS = False 565 566 QUERY_MODIFIER_PARSERS = { 567 **parser.Parser.QUERY_MODIFIER_PARSERS, 568 TokenType.OPTION: lambda self: ("options", self._parse_options()), 569 TokenType.FOR: lambda self: ("for", self._parse_for()), 570 } 571 572 # T-SQL does not allow BEGIN to be used as an identifier 573 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 574 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 575 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 576 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 577 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - 
{TokenType.BEGIN} 578 579 FUNCTIONS = { 580 **parser.Parser.FUNCTIONS, 581 "CHARINDEX": lambda args: exp.StrPosition( 582 this=seq_get(args, 1), 583 substr=seq_get(args, 0), 584 position=seq_get(args, 2), 585 ), 586 "COUNT": lambda args: exp.Count( 587 this=seq_get(args, 0), expressions=args[1:], big_int=False 588 ), 589 "COUNT_BIG": lambda args: exp.Count( 590 this=seq_get(args, 0), expressions=args[1:], big_int=True 591 ), 592 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 593 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 594 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 595 "DATEPART": _build_formatted_time(exp.TimeToStr), 596 "DATETIMEFROMPARTS": _build_datetimefromparts, 597 "EOMONTH": _build_eomonth, 598 "FORMAT": _build_format, 599 "GETDATE": exp.CurrentTimestamp.from_arg_list, 600 "HASHBYTES": _build_hashbytes, 601 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 602 "JSON_QUERY": _build_json_query, 603 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 604 "LEN": _build_with_arg_as_text(exp.Length), 605 "LEFT": _build_with_arg_as_text(exp.Left), 606 "NEWID": exp.Uuid.from_arg_list, 607 "RIGHT": _build_with_arg_as_text(exp.Right), 608 "PARSENAME": _build_parsename, 609 "REPLICATE": exp.Repeat.from_arg_list, 610 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 611 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 612 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 613 "SUSER_NAME": exp.CurrentUser.from_arg_list, 614 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 615 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 616 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 617 "TIMEFROMPARTS": _build_timefromparts, 618 "DATETRUNC": _build_datetrunc, 619 } 620 621 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 622 623 PROCEDURE_OPTIONS = dict.fromkeys( 624 ("ENCRYPTION", "RECOMPILE", 
"SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 625 ) 626 627 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 628 629 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 630 TokenType.TABLE, 631 *parser.Parser.TYPE_TOKENS, 632 } 633 634 STATEMENT_PARSERS = { 635 **parser.Parser.STATEMENT_PARSERS, 636 TokenType.DECLARE: lambda self: self._parse_declare(), 637 } 638 639 RANGE_PARSERS = { 640 **parser.Parser.RANGE_PARSERS, 641 TokenType.DCOLON: lambda self, this: self.expression( 642 exp.ScopeResolution, 643 this=this, 644 expression=self._parse_function() or self._parse_var(any_token=True), 645 ), 646 } 647 648 NO_PAREN_FUNCTION_PARSERS = { 649 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 650 "NEXT": lambda self: self._parse_next_value_for(), 651 } 652 653 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 654 COLUMN_OPERATORS = { 655 **parser.Parser.COLUMN_OPERATORS, 656 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 657 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 658 else self.expression(exp.ScopeResolution, this=this, expression=to), 659 } 660 661 def _parse_alter_table_set(self) -> exp.AlterSet: 662 return self._parse_wrapped(super()._parse_alter_table_set) 663 664 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 665 if self._match(TokenType.MERGE): 666 comments = self._prev_comments 667 merge = self._parse_merge() 668 merge.add_comments(comments, prepend=True) 669 return merge 670 671 return super()._parse_wrapped_select(table=table) 672 673 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 674 # We want to use _parse_types() if the first token after :: is a known type, 675 # otherwise we could parse something like x::varchar(max) into a function 676 if self._match_set(self.TYPE_TOKENS, advance=False): 677 return self._parse_types() 678 679 return self._parse_function() or 
self._parse_types() 680 681 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 682 if not self._match(TokenType.OPTION): 683 return None 684 685 def _parse_option() -> t.Optional[exp.Expression]: 686 option = self._parse_var_from_options(OPTIONS) 687 if not option: 688 return None 689 690 self._match(TokenType.EQ) 691 return self.expression( 692 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 693 ) 694 695 return self._parse_wrapped_csv(_parse_option) 696 697 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 698 this = self._parse_primary_or_var() 699 if self._match(TokenType.L_PAREN, advance=False): 700 expression = self._parse_wrapped(self._parse_string) 701 else: 702 expression = None 703 704 return exp.XMLKeyValueOption(this=this, expression=expression) 705 706 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 707 if not self._match_pair(TokenType.FOR, TokenType.XML): 708 return None 709 710 def _parse_for_xml() -> t.Optional[exp.Expression]: 711 return self.expression( 712 exp.QueryOption, 713 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 714 or self._parse_xml_key_value_option(), 715 ) 716 717 return self._parse_csv(_parse_for_xml) 718 719 def _parse_projections(self) -> t.List[exp.Expression]: 720 """ 721 T-SQL supports the syntax alias = expression in the SELECT's projection list, 722 so we transform all parsed Selects to convert their EQ projections into Aliases. 
723 724 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 725 """ 726 return [ 727 ( 728 exp.alias_(projection.expression, projection.this.this, copy=False) 729 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 730 else projection 731 ) 732 for projection in super()._parse_projections() 733 ] 734 735 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 736 """Applies to SQL Server and Azure SQL Database 737 COMMIT [ { TRAN | TRANSACTION } 738 [ transaction_name | @tran_name_variable ] ] 739 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 740 741 ROLLBACK { TRAN | TRANSACTION } 742 [ transaction_name | @tran_name_variable 743 | savepoint_name | @savepoint_variable ] 744 """ 745 rollback = self._prev.token_type == TokenType.ROLLBACK 746 747 self._match_texts(("TRAN", "TRANSACTION")) 748 this = self._parse_id_var() 749 750 if rollback: 751 return self.expression(exp.Rollback, this=this) 752 753 durability = None 754 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 755 self._match_text_seq("DELAYED_DURABILITY") 756 self._match(TokenType.EQ) 757 758 if self._match_text_seq("OFF"): 759 durability = False 760 else: 761 self._match(TokenType.ON) 762 durability = True 763 764 self._match_r_paren() 765 766 return self.expression(exp.Commit, this=this, durability=durability) 767 768 def _parse_transaction(self) -> exp.Transaction | exp.Command: 769 """Applies to SQL Server and Azure SQL Database 770 BEGIN { TRAN | TRANSACTION } 771 [ { transaction_name | @tran_name_variable } 772 [ WITH MARK [ 'description' ] ] 773 ] 774 """ 775 if self._match_texts(("TRAN", "TRANSACTION")): 776 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 777 if self._match_text_seq("WITH", "MARK"): 778 transaction.set("mark", self._parse_string()) 779 780 return transaction 781 782 return self._parse_as_command(self._prev) 783 784 def _parse_returns(self) -> 
exp.ReturnsProperty: 785 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 786 returns = super()._parse_returns() 787 returns.set("table", table) 788 return returns 789 790 def _parse_convert( 791 self, strict: bool, safe: t.Optional[bool] = None 792 ) -> t.Optional[exp.Expression]: 793 this = self._parse_types() 794 self._match(TokenType.COMMA) 795 args = [this, *self._parse_csv(self._parse_assignment)] 796 convert = exp.Convert.from_arg_list(args) 797 convert.set("safe", safe) 798 convert.set("strict", strict) 799 return convert 800 801 def _parse_column_def( 802 self, this: t.Optional[exp.Expression], computed_column: bool = True 803 ) -> t.Optional[exp.Expression]: 804 this = super()._parse_column_def(this=this, computed_column=computed_column) 805 if not this: 806 return None 807 if self._match(TokenType.EQ): 808 this.set("default", self._parse_disjunction()) 809 if self._match_texts(self.COLUMN_DEFINITION_MODES): 810 this.set("output", self._prev.text) 811 return this 812 813 def _parse_user_defined_function( 814 self, kind: t.Optional[TokenType] = None 815 ) -> t.Optional[exp.Expression]: 816 this = super()._parse_user_defined_function(kind=kind) 817 818 if ( 819 kind == TokenType.FUNCTION 820 or isinstance(this, exp.UserDefinedFunction) 821 or self._match(TokenType.ALIAS, advance=False) 822 ): 823 return this 824 825 if not self._match(TokenType.WITH, advance=False): 826 expressions = self._parse_csv(self._parse_function_parameter) 827 else: 828 expressions = None 829 830 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 831 832 def _parse_into(self) -> t.Optional[exp.Into]: 833 into = super()._parse_into() 834 835 table = isinstance(into, exp.Into) and into.find(exp.Table) 836 if isinstance(table, exp.Table): 837 table_identifier = table.this 838 if table_identifier.args.get("temporary"): 839 # Promote the temporary property from the Identifier to the Into expression 840 t.cast(exp.Into, 
into).set("temporary", True) 841 842 return into 843 844 def _parse_id_var( 845 self, 846 any_token: bool = True, 847 tokens: t.Optional[t.Collection[TokenType]] = None, 848 ) -> t.Optional[exp.Expression]: 849 is_temporary = self._match(TokenType.HASH) 850 is_global = is_temporary and self._match(TokenType.HASH) 851 852 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 853 if this: 854 if is_global: 855 this.set("global", True) 856 elif is_temporary: 857 this.set("temporary", True) 858 859 return this 860 861 def _parse_create(self) -> exp.Create | exp.Command: 862 create = super()._parse_create() 863 864 if isinstance(create, exp.Create): 865 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 866 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 867 if not create.args.get("properties"): 868 create.set("properties", exp.Properties(expressions=[])) 869 870 create.args["properties"].append("expressions", exp.TemporaryProperty()) 871 872 return create 873 874 def _parse_if(self) -> t.Optional[exp.Expression]: 875 index = self._index 876 877 if self._match_text_seq("OBJECT_ID"): 878 self._parse_wrapped_csv(self._parse_string) 879 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 880 return self._parse_drop(exists=True) 881 self._retreat(index) 882 883 return super()._parse_if() 884 885 def _parse_unique(self) -> exp.UniqueColumnConstraint: 886 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 887 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 888 else: 889 this = self._parse_schema(self._parse_id_var(any_token=False)) 890 891 return self.expression(exp.UniqueColumnConstraint, this=this) 892 893 def _parse_partition(self) -> t.Optional[exp.Partition]: 894 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 895 return None 896 897 def parse_range(): 898 low = self._parse_bitwise() 899 high = self._parse_bitwise() if self._match_text_seq("TO") 
else None 900 901 return ( 902 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 903 ) 904 905 partition = self.expression( 906 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 907 ) 908 909 self._match_r_paren() 910 911 return partition 912 913 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 914 var = self._parse_id_var() 915 if not var: 916 return None 917 918 self._match(TokenType.ALIAS) 919 return self.expression( 920 exp.DeclareItem, 921 this=var, 922 kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(), 923 default=self._match(TokenType.EQ) and self._parse_bitwise(), 924 ) 925 926 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 927 expression = super()._parse_alter_table_alter() 928 929 if expression is not None: 930 collation = expression.args.get("collate") 931 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 932 identifier = collation.this 933 collation.set("this", exp.Var(this=identifier.name)) 934 935 return expression 936 937 class Generator(generator.Generator): 938 LIMIT_IS_TOP = True 939 QUERY_HINTS = False 940 RETURNING_END = False 941 NVL2_SUPPORTED = False 942 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 943 LIMIT_FETCH = "FETCH" 944 COMPUTED_COLUMN_WITH_TYPE = False 945 CTE_RECURSIVE_KEYWORD_REQUIRED = False 946 ENSURE_BOOLS = True 947 NULL_ORDERING_SUPPORTED = None 948 SUPPORTS_SINGLE_ARG_CONCAT = False 949 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 950 SUPPORTS_SELECT_INTO = True 951 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 952 SUPPORTS_TO_NUMBER = False 953 SET_OP_MODIFIERS = False 954 COPY_PARAMS_EQ_REQUIRED = True 955 PARSE_JSON_NAME = None 956 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 957 ALTER_SET_WRAPPED = True 958 ALTER_SET_TYPE = "" 959 960 EXPRESSIONS_WITHOUT_NESTED_CTES = { 961 exp.Create, 962 exp.Delete, 963 exp.Insert, 964 exp.Intersect, 965 exp.Except, 966 exp.Merge, 967 exp.Select, 968 
exp.Subquery, 969 exp.Union, 970 exp.Update, 971 } 972 973 SUPPORTED_JSON_PATH_PARTS = { 974 exp.JSONPathKey, 975 exp.JSONPathRoot, 976 exp.JSONPathSubscript, 977 } 978 979 TYPE_MAPPING = { 980 **generator.Generator.TYPE_MAPPING, 981 exp.DataType.Type.BOOLEAN: "BIT", 982 exp.DataType.Type.DATETIME2: "DATETIME2", 983 exp.DataType.Type.DECIMAL: "NUMERIC", 984 exp.DataType.Type.DOUBLE: "FLOAT", 985 exp.DataType.Type.INT: "INTEGER", 986 exp.DataType.Type.ROWVERSION: "ROWVERSION", 987 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 988 exp.DataType.Type.TIMESTAMP: "DATETIME2", 989 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 990 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 991 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 992 exp.DataType.Type.UTINYINT: "TINYINT", 993 exp.DataType.Type.VARIANT: "SQL_VARIANT", 994 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 995 } 996 997 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 998 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 999 1000 TRANSFORMS = { 1001 **generator.Generator.TRANSFORMS, 1002 exp.AnyValue: any_value_to_max_sql, 1003 exp.ArrayToString: rename_func("STRING_AGG"), 1004 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 1005 exp.Chr: rename_func("CHAR"), 1006 exp.DateAdd: date_delta_sql("DATEADD"), 1007 exp.DateDiff: date_delta_sql("DATEDIFF"), 1008 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 1009 exp.CurrentDate: rename_func("GETDATE"), 1010 exp.CurrentTimestamp: rename_func("GETDATE"), 1011 exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"), 1012 exp.DateStrToDate: datestrtodate_sql, 1013 exp.Extract: rename_func("DATEPART"), 1014 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 1015 exp.GroupConcat: _string_agg_sql, 1016 exp.If: rename_func("IIF"), 1017 exp.JSONExtract: _json_extract_sql, 1018 exp.JSONExtractScalar: _json_extract_sql, 1019 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1020 exp.Ln: rename_func("LOG"), 1021 exp.Max: 
max_or_greatest,
            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
            exp.Min: min_or_least,
            exp.NumberToStr: _format_sql,
            exp.Repeat: rename_func("REPLICATE"),
            exp.CurrentSchema: rename_func("SCHEMA_NAME"),
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_distinct_on,
                    transforms.eliminate_semi_and_anti_joins,
                    transforms.eliminate_qualify,
                    transforms.unnest_generate_date_array_using_recursive_cte,
                ]
            ),
            exp.Stddev: rename_func("STDEV"),
            exp.StrPosition: lambda self, e: strposition_sql(
                self, e, func_name="CHARINDEX", supports_position=True
            ),
            exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
            exp.SHA2: lambda self, e: self.func(
                "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
            ),
            exp.TemporaryProperty: lambda self, e: "",
            exp.TimeStrToTime: _timestrtotime_sql,
            exp.TimeToStr: _format_sql,
            exp.Trim: trim_sql,
            exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
            exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
            exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
            exp.Uuid: lambda *_: "NEWID()",
            exp.DateFromParts: rename_func("DATEFROMPARTS"),
        }

        # RETURNS is rendered by returnsproperty_sql below, not via the transforms table.
        TRANSFORMS.pop(exp.ReturnsProperty)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def scope_resolution(self, rhs: str, scope_name: str) -> str:
            # T-SQL scope resolution reads "scope::member".
            return f"{scope_name}::{rhs}"

        def select_sql(self, expression: exp.Select) -> str:
            limit = expression.args.get("limit")
            offset = expression.args.get("offset")

            if isinstance(limit, exp.Fetch) and not offset:
                # Dialects like Oracle can FETCH directly from a row set but
                # T-SQL
# requires an ORDER BY + OFFSET clause in order to FETCH
                offset = exp.Offset(expression=exp.Literal.number(0))
                expression.set("offset", offset)

            if offset:
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

                if isinstance(limit, exp.Limit):
                    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                    # we replace here because otherwise TOP would be generated in select_sql
                    limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            # CONVERT(<type>, <value>[, <style>]); TRY_CONVERT is the non-raising variant.
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                # Some OPTION(...) settings need an '=' between the name and its value.
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            # LATERAL maps to CROSS APPLY / OUTER APPLY in T-SQL.
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"

        def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
            this = expression.this
            split_count = len(this.name.split("."))
            delimiter =
expression.args.get("delimiter")
            part_index = expression.args.get("part_index")

            # PARSENAME only handles '.'-delimited literal strings of at most 4 parts,
            # and it numbers the parts from the right.
            if (
                not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
                or (delimiter and delimiter.name != ".")
                or not part_index
                or split_count > 4
            ):
                self.unsupported(
                    "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
                )
                return ""

            return self.func(
                "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
            )

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            # TIMEFROMPARTS takes no optional arguments, so default fractions/precision to 0.
            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)

        def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
            zone = expression.args.get("zone")
            if zone is not None:
                zone.pop()
                self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

            # DATETIMEFROMPARTS requires the milliseconds argument; default it to 0.
            if expression.args.get("milli") is None:
                expression.set("milli", exp.Literal.number(0))

            return rename_func("DATETIMEFROMPARTS")(self, expression)

        def setitem_sql(self, expression: exp.SetItem) -> str:
            this = expression.this
            if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
                # T-SQL does not use '=' in SET command, except when the LHS is a variable.
return f"{self.sql(this.left)} {self.sql(this.right)}"

            return super().setitem_sql(expression)

        def boolean_sql(self, expression: exp.Boolean) -> str:
            # T-SQL has no TRUE/FALSE literals: emit BIT 1/0 where a value is expected,
            # and a tautology/contradiction predicate where a condition is expected.
            if type(expression.parent) in BIT_TYPES or isinstance(
                expression.find_ancestor(exp.Values, exp.Select), exp.Values
            ):
                return "1" if expression.this else "0"

            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            # IS TRUE/FALSE is invalid T-SQL; compare against the bit value instead.
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            # Temporary tables are spelled with a leading '#'; insert it inside the
            # opening bracket if the name is [quoted].
            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            kind = expression.kind
            exists = expression.args.pop("exists", None)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            if kind == "VIEW":
                expression.this.set("catalog", None)
                with_ = expression.args.get("with")
                if ctas_expression and with_:
                    # We've already preprocessed the Create expression to bubble up any nested CTEs,
                    # but CREATE VIEW actually requires the WITH clause to come after it so we need
                    # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                    ctas_expression.set("with", with_.pop())

            table = expression.find(exp.Table)

            # Convert CTAS statement to SELECT .. INTO ..
if kind == "TABLE" and ctas_expression:
                if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                    ctas_expression = ctas_expression.subquery()

                properties = expression.args.get("properties") or exp.Properties()
                is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

                select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
                select_into.set("into", exp.Into(this=table, temporary=is_temp))

                # CREATE TABLE ... LIKE copies only the shape, not the data: LIMIT 0.
                if like_property:
                    select_into.limit(0, copy=False)

                sql = self.sql(select_into)
            else:
                sql = super().create_sql(expression)

            if exists:
                # Emulate IF NOT EXISTS by probing the catalog views and EXEC'ing the DDL string.
                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
                sql_with_ctes = self.prepend_ctes(expression, sql)
                sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
                if kind == "SCHEMA":
                    return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
                elif kind == "TABLE":
                    assert table
                    where = exp.and_(
                        exp.column("TABLE_NAME").eq(table.name),
                        exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                        exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
                    )
                    return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
                elif kind == "INDEX":
                    index = self.sql(exp.Literal.string(expression.this.text("this")))
                    return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
            elif expression.args.get("replace"):
                # T-SQL spells OR REPLACE as OR ALTER.
                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

            return self.prepend_ctes(expression, sql)

        @generator.unsupported_args("unlogged", "expressions")
        def into_sql(self, expression: exp.Into) -> str:
            if expression.args.get("temporary"):
                # If the Into expression
# has a temporary property, push this down to the Identifier
                table = expression.find(exp.Table)
                if table and isinstance(table.this, exp.Identifier):
                    table.this.set("temporary", True)

            return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

        def count_sql(self, expression: exp.Count) -> str:
            # COUNT_BIG is the BIGINT-returning variant of COUNT.
            func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
            return rename_func(func_name)(self, expression)

        def offset_sql(self, expression: exp.Offset) -> str:
            # T-SQL requires the ROWS keyword after OFFSET <n>.
            return f"{super().offset_sql(expression)} ROWS"

        def version_sql(self, expression: exp.Version) -> str:
            # Temporal table clause: FOR SYSTEM_TIME <kind> ...
            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
            this = f"FOR {name}"
            expr = expression.expression
            kind = expression.text("kind")
            if kind in ("FROM", "BETWEEN"):
                # FROM x TO y vs BETWEEN x AND y use different separators.
                args = expr.expressions
                sep = "TO" if kind == "FROM" else "AND"
                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
            else:
                expr_sql = self.sql(expr)

            expr_sql = f" {expr_sql}" if expr_sql else ""
            return f"{this} {kind}{expr_sql}"

        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
            # RETURNS [@table_var ]<type>, for table-valued functions.
            table = expression.args.get("table")
            table = f"{table} " if table else ""
            return f"RETURNS {table}{self.sql(expression, 'this')}"

        def returning_sql(self, expression: exp.Returning) -> str:
            # RETURNING maps to the OUTPUT clause, optionally with INTO <target>.
            into = self.sql(expression, "into")
            into = self.seg(f"INTO {into}") if into else ""
            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

        def transaction_sql(self, expression: exp.Transaction) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            mark = self.sql(expression, "mark")
            mark = f" WITH MARK {mark}" if mark else ""
            return f"BEGIN TRANSACTION{this}{mark}"

        def commit_sql(self, expression: exp.Commit) -> str:
            this =
self.sql(expression, "this")
            this = f" {this}" if this else ""
            durability = expression.args.get("durability")
            durability = (
                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
                if durability is not None
                else ""
            )
            return f"COMMIT TRANSACTION{this}{durability}"

        def rollback_sql(self, expression: exp.Rollback) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            return f"ROLLBACK TRANSACTION{this}"

        def identifier_sql(self, expression: exp.Identifier) -> str:
            identifier = super().identifier_sql(expression)

            # '##' prefixes a global temporary object, '#' a session-local one.
            if expression.args.get("global"):
                identifier = f"##{identifier}"
            elif expression.args.get("temporary"):
                identifier = f"#{identifier}"

            return identifier

        def constraint_sql(self, expression: exp.Constraint) -> str:
            this = self.sql(expression, "this")
            expressions = self.expressions(expression, flat=True, sep=" ")
            return f"CONSTRAINT {this} {expressions}"

        def length_sql(self, expression: exp.Length) -> str:
            return self._uncast_text(expression, "LEN")

        def right_sql(self, expression: exp.Right) -> str:
            return self._uncast_text(expression, "RIGHT")

        def left_sql(self, expression: exp.Left) -> str:
            return self._uncast_text(expression, "LEFT")

        def _uncast_text(self, expression: exp.Expression, name: str) -> str:
            # Drop the CAST(... AS TEXT) wrapper these functions gain at parse time.
            this = expression.this
            if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
                this_sql = self.sql(this, "this")
            else:
                this_sql = self.sql(this)
            expression_sql = self.sql(expression, "expression")
            return self.func(name, this_sql, expression_sql if expression_sql else None)

        def partition_sql(self, expression: exp.Partition) -> str:
            return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

        def alter_sql(self, expression: exp.Alter) -> str:
            action =
seq_get(expression.args.get("actions") or [], 0)
            if isinstance(action, exp.AlterRename):
                # T-SQL renames objects via the sp_rename system procedure.
                return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
            return super().alter_sql(expression)

        def drop_sql(self, expression: exp.Drop) -> str:
            if expression.args["kind"] == "VIEW":
                # DROP VIEW may not be qualified with a catalog (database) name.
                expression.this.set("catalog", None)
            return super().drop_sql(expression)

        def options_modifier(self, expression: exp.Expression) -> str:
            options = self.expressions(expression, key="options")
            return f" OPTION{self.wrap(options)}" if options else ""

        def dpipe_sql(self, expression: exp.DPipe) -> str:
            # '||' concatenation is not supported; fold the operands into a '+' chain.
            return self.sql(
                reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
            )

        def isascii_sql(self, expression: exp.IsAscii) -> str:
            # The hex constant decodes to the pattern '%[^\x00-\x7f]%'; PATINDEX = 0
            # means no character outside the ASCII range was found.
            return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

        def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
            # Append parameter default ('= x') and mode (OUT/OUTPUT/READONLY) if present.
            this = super().columndef_sql(expression, sep)
            default = self.sql(expression, "default")
            default = f" = {default}" if default else ""
            output = self.sql(expression, "output")
            output = f" {output}" if output else ""
            return f"{this}{default}{output}"

        def coalesce_sql(self, expression: exp.Coalesce) -> str:
            # ISNULL is the two-argument variant, parsed into Coalesce(is_null=True).
            func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
            return rename_func(func_name)(self, expression)
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents.
        # This must be materialized eagerly: a lazy generator would only be evaluated after
        # qualify_outputs() has replaced the Column projections with Alias nodes, so the
        # isinstance(c, exp.Column) filter would match nothing and no quoting would be kept.
        query = expression.this
        unaliased_column_indexes = tuple(
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression
Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
406class TSQL(Dialect): 407 SUPPORTS_SEMI_ANTI_JOIN = False 408 LOG_BASE_FIRST = False 409 TYPED_DIVISION = True 410 CONCAT_COALESCE = True 411 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 412 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 413 414 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 415 416 TIME_MAPPING = { 417 "year": "%Y", 418 "dayofyear": "%j", 419 "day": "%d", 420 "dy": "%d", 421 "y": "%Y", 422 "week": "%W", 423 "ww": "%W", 424 "wk": "%W", 425 "hour": "%h", 426 "hh": "%I", 427 "minute": "%M", 428 "mi": "%M", 429 "n": "%M", 430 "second": "%S", 431 "ss": "%S", 432 "s": "%-S", 433 "millisecond": "%f", 434 "ms": "%f", 435 "weekday": "%w", 436 "dw": "%w", 437 "month": "%m", 438 "mm": "%M", 439 "m": "%-M", 440 "Y": "%Y", 441 "YYYY": "%Y", 442 "YY": "%y", 443 "MMMM": "%B", 444 "MMM": "%b", 445 "MM": "%m", 446 "M": "%-m", 447 "dddd": "%A", 448 "dd": "%d", 449 "d": "%-d", 450 "HH": "%H", 451 "H": "%-H", 452 "h": "%-I", 453 "ffffff": "%f", 454 "yyyy": "%Y", 455 "yy": "%y", 456 } 457 458 CONVERT_FORMAT_MAPPING = { 459 "0": "%b %d %Y %-I:%M%p", 460 "1": "%m/%d/%y", 461 "2": "%y.%m.%d", 462 "3": "%d/%m/%y", 463 "4": "%d.%m.%y", 464 "5": "%d-%m-%y", 465 "6": "%d %b %y", 466 "7": "%b %d, %y", 467 "8": "%H:%M:%S", 468 "9": "%b %d %Y %-I:%M:%S:%f%p", 469 "10": "mm-dd-yy", 470 "11": "yy/mm/dd", 471 "12": "yymmdd", 472 "13": "%d %b %Y %H:%M:ss:%f", 473 "14": "%H:%M:%S:%f", 474 "20": "%Y-%m-%d %H:%M:%S", 475 "21": "%Y-%m-%d %H:%M:%S.%f", 476 "22": "%m/%d/%y %-I:%M:%S %p", 477 "23": "%Y-%m-%d", 478 "24": "%H:%M:%S", 479 "25": "%Y-%m-%d %H:%M:%S.%f", 480 "100": "%b %d %Y %-I:%M%p", 481 "101": "%m/%d/%Y", 482 "102": "%Y.%m.%d", 483 "103": "%d/%m/%Y", 484 "104": "%d.%m.%Y", 485 "105": "%d-%m-%Y", 486 "106": "%d %b %Y", 487 "107": "%b %d, %Y", 488 "108": "%H:%M:%S", 489 "109": "%b %d %Y %-I:%M:%S:%f%p", 490 "110": "%m-%d-%Y", 491 "111": "%Y/%m/%d", 492 "112": "%Y%m%d", 493 "113": "%d %b %Y %H:%M:%S:%f", 494 "114": "%H:%M:%S:%f", 495 "120": "%Y-%m-%d %H:%M:%S", 
496 "121": "%Y-%m-%d %H:%M:%S.%f", 497 "126": "%Y-%m-%dT%H:%M:%S.%f", 498 } 499 500 FORMAT_TIME_MAPPING = { 501 "y": "%B %Y", 502 "d": "%m/%d/%Y", 503 "H": "%-H", 504 "h": "%-I", 505 "s": "%Y-%m-%d %H:%M:%S", 506 "D": "%A,%B,%Y", 507 "f": "%A,%B,%Y %-I:%M %p", 508 "F": "%A,%B,%Y %-I:%M:%S %p", 509 "g": "%m/%d/%Y %-I:%M %p", 510 "G": "%m/%d/%Y %-I:%M:%S %p", 511 "M": "%B %-d", 512 "m": "%B %-d", 513 "O": "%Y-%m-%dT%H:%M:%S", 514 "u": "%Y-%M-%D %H:%M:%S%z", 515 "U": "%A, %B %D, %Y %H:%M:%S%z", 516 "T": "%-I:%M:%S %p", 517 "t": "%-I:%M", 518 "Y": "%a %Y", 519 } 520 521 class Tokenizer(tokens.Tokenizer): 522 IDENTIFIERS = [("[", "]"), '"'] 523 QUOTES = ["'", '"'] 524 HEX_STRINGS = [("0x", ""), ("0X", "")] 525 VAR_SINGLE_TOKENS = {"@", "$", "#"} 526 527 KEYWORDS = { 528 **tokens.Tokenizer.KEYWORDS, 529 "CLUSTERED INDEX": TokenType.INDEX, 530 "DATETIME2": TokenType.DATETIME2, 531 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 532 "DECLARE": TokenType.DECLARE, 533 "EXEC": TokenType.COMMAND, 534 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 535 "GO": TokenType.COMMAND, 536 "IMAGE": TokenType.IMAGE, 537 "MONEY": TokenType.MONEY, 538 "NONCLUSTERED INDEX": TokenType.INDEX, 539 "NTEXT": TokenType.TEXT, 540 "OPTION": TokenType.OPTION, 541 "OUTPUT": TokenType.RETURNING, 542 "PRINT": TokenType.COMMAND, 543 "PROC": TokenType.PROCEDURE, 544 "REAL": TokenType.FLOAT, 545 "ROWVERSION": TokenType.ROWVERSION, 546 "SMALLDATETIME": TokenType.SMALLDATETIME, 547 "SMALLMONEY": TokenType.SMALLMONEY, 548 "SQL_VARIANT": TokenType.VARIANT, 549 "SYSTEM_USER": TokenType.CURRENT_USER, 550 "TOP": TokenType.TOP, 551 "TIMESTAMP": TokenType.ROWVERSION, 552 "TINYINT": TokenType.UTINYINT, 553 "UNIQUEIDENTIFIER": TokenType.UUID, 554 "UPDATE STATISTICS": TokenType.COMMAND, 555 "XML": TokenType.XML, 556 } 557 KEYWORDS.pop("/*+") 558 559 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 560 561 class Parser(parser.Parser): 562 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 563 LOG_DEFAULTS_TO_LN = True 564 
STRING_ALIASES = True 565 NO_PAREN_IF_COMMANDS = False 566 567 QUERY_MODIFIER_PARSERS = { 568 **parser.Parser.QUERY_MODIFIER_PARSERS, 569 TokenType.OPTION: lambda self: ("options", self._parse_options()), 570 TokenType.FOR: lambda self: ("for", self._parse_for()), 571 } 572 573 # T-SQL does not allow BEGIN to be used as an identifier 574 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 575 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 576 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 577 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 578 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 579 580 FUNCTIONS = { 581 **parser.Parser.FUNCTIONS, 582 "CHARINDEX": lambda args: exp.StrPosition( 583 this=seq_get(args, 1), 584 substr=seq_get(args, 0), 585 position=seq_get(args, 2), 586 ), 587 "COUNT": lambda args: exp.Count( 588 this=seq_get(args, 0), expressions=args[1:], big_int=False 589 ), 590 "COUNT_BIG": lambda args: exp.Count( 591 this=seq_get(args, 0), expressions=args[1:], big_int=True 592 ), 593 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 594 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 595 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 596 "DATEPART": _build_formatted_time(exp.TimeToStr), 597 "DATETIMEFROMPARTS": _build_datetimefromparts, 598 "EOMONTH": _build_eomonth, 599 "FORMAT": _build_format, 600 "GETDATE": exp.CurrentTimestamp.from_arg_list, 601 "HASHBYTES": _build_hashbytes, 602 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 603 "JSON_QUERY": _build_json_query, 604 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 605 "LEN": _build_with_arg_as_text(exp.Length), 606 "LEFT": _build_with_arg_as_text(exp.Left), 607 "NEWID": exp.Uuid.from_arg_list, 608 "RIGHT": _build_with_arg_as_text(exp.Right), 609 
"PARSENAME": _build_parsename, 610 "REPLICATE": exp.Repeat.from_arg_list, 611 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 612 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 613 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 614 "SUSER_NAME": exp.CurrentUser.from_arg_list, 615 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 616 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 617 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 618 "TIMEFROMPARTS": _build_timefromparts, 619 "DATETRUNC": _build_datetrunc, 620 } 621 622 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 623 624 PROCEDURE_OPTIONS = dict.fromkeys( 625 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 626 ) 627 628 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 629 630 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 631 TokenType.TABLE, 632 *parser.Parser.TYPE_TOKENS, 633 } 634 635 STATEMENT_PARSERS = { 636 **parser.Parser.STATEMENT_PARSERS, 637 TokenType.DECLARE: lambda self: self._parse_declare(), 638 } 639 640 RANGE_PARSERS = { 641 **parser.Parser.RANGE_PARSERS, 642 TokenType.DCOLON: lambda self, this: self.expression( 643 exp.ScopeResolution, 644 this=this, 645 expression=self._parse_function() or self._parse_var(any_token=True), 646 ), 647 } 648 649 NO_PAREN_FUNCTION_PARSERS = { 650 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 651 "NEXT": lambda self: self._parse_next_value_for(), 652 } 653 654 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 655 COLUMN_OPERATORS = { 656 **parser.Parser.COLUMN_OPERATORS, 657 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 658 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 659 else self.expression(exp.ScopeResolution, this=this, expression=to), 660 } 661 662 def _parse_alter_table_set(self) -> exp.AlterSet: 663 return 
self._parse_wrapped(super()._parse_alter_table_set)

        def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]:
            # A parenthesized MERGE is accepted wherever a wrapped SELECT is.
            if self._match(TokenType.MERGE):
                comments = self._prev_comments
                merge = self._parse_merge()
                merge.add_comments(comments, prepend=True)
                return merge

            return super()._parse_wrapped_select(table=table)

        def _parse_dcolon(self) -> t.Optional[exp.Expression]:
            # We want to use _parse_types() if the first token after :: is a known type,
            # otherwise we could parse something like x::varchar(max) into a function
            if self._match_set(self.TYPE_TOKENS, advance=False):
                return self._parse_types()

            return self._parse_function() or self._parse_types()

        def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
            # OPTION ( <hint> [= <value>] [, ...] ) query hints.
            if not self._match(TokenType.OPTION):
                return None

            def _parse_option() -> t.Optional[exp.Expression]:
                option = self._parse_var_from_options(OPTIONS)
                if not option:
                    return None

                self._match(TokenType.EQ)
                return self.expression(
                    exp.QueryOption, this=option, expression=self._parse_primary_or_var()
                )

            return self._parse_wrapped_csv(_parse_option)

        def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption:
            # A FOR XML directive, optionally with a parenthesized string argument.
            this = self._parse_primary_or_var()
            if self._match(TokenType.L_PAREN, advance=False):
                expression = self._parse_wrapped(self._parse_string)
            else:
                expression = None

            return exp.XMLKeyValueOption(this=this, expression=expression)

        def _parse_for(self) -> t.Optional[t.List[exp.Expression]]:
            if not self._match_pair(TokenType.FOR, TokenType.XML):
                return None

            def _parse_for_xml() -> t.Optional[exp.Expression]:
                return self.expression(
                    exp.QueryOption,
                    this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False)
                    or self._parse_xml_key_value_option(),
                )

            return self._parse_csv(_parse_for_xml)

        def _parse_projections(self) ->
t.List[exp.Expression]:
            """
            T-SQL supports the syntax alias = expression in the SELECT's projection list,
            so we transform all parsed Selects to convert their EQ projections into Aliases.

            See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
            """
            return [
                (
                    exp.alias_(projection.expression, projection.this.this, copy=False)
                    if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                    else projection
                )
                for projection in super()._parse_projections()
            ]

        def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
            """Applies to SQL Server and Azure SQL Database
            COMMIT [ { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable ] ]
                [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

            ROLLBACK { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable
                | savepoint_name | @savepoint_variable ]
            """
            # The COMMIT/ROLLBACK keyword itself was already consumed by the caller.
            rollback = self._prev.token_type == TokenType.ROLLBACK

            self._match_texts(("TRAN", "TRANSACTION"))
            this = self._parse_id_var()

            if rollback:
                return self.expression(exp.Rollback, this=this)

            durability = None
            if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
                self._match_text_seq("DELAYED_DURABILITY")
                self._match(TokenType.EQ)

                if self._match_text_seq("OFF"):
                    durability = False
                else:
                    self._match(TokenType.ON)
                    durability = True

                self._match_r_paren()

            return self.expression(exp.Commit, this=this, durability=durability)

        def _parse_transaction(self) -> exp.Transaction | exp.Command:
            """Applies to SQL Server and Azure SQL Database
            BEGIN { TRAN | TRANSACTION }
            [ { transaction_name | @tran_name_variable }
            [ WITH MARK [ 'description' ] ]
            ]
            """
            if self._match_texts(("TRAN", "TRANSACTION")):
                transaction = self.expression(exp.Transaction, this=self._parse_id_var())
                if
self._match_text_seq("WITH", "MARK"):
                    transaction.set("mark", self._parse_string())

                return transaction

            return self._parse_as_command(self._prev)

        def _parse_returns(self) -> exp.ReturnsProperty:
            # Capture an optional table-variable name that precedes the RETURNS type.
            table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
            returns = super()._parse_returns()
            returns.set("table", table)
            return returns

        def _parse_convert(
            self, strict: bool, safe: t.Optional[bool] = None
        ) -> t.Optional[exp.Expression]:
            # CONVERT(<type>, <expr>[, <style>])
            this = self._parse_types()
            self._match(TokenType.COMMA)
            args = [this, *self._parse_csv(self._parse_assignment)]
            convert = exp.Convert.from_arg_list(args)
            convert.set("safe", safe)
            convert.set("strict", strict)
            return convert

        def _parse_column_def(
            self, this: t.Optional[exp.Expression], computed_column: bool = True
        ) -> t.Optional[exp.Expression]:
            this = super()._parse_column_def(this=this, computed_column=computed_column)
            if not this:
                return None
            if self._match(TokenType.EQ):
                # Parameter default, e.g. @param INT = 0
                this.set("default", self._parse_disjunction())
            if self._match_texts(self.COLUMN_DEFINITION_MODES):
                # OUT / OUTPUT / READONLY parameter modes
                this.set("output", self._prev.text)
            return this

        def _parse_user_defined_function(
            self, kind: t.Optional[TokenType] = None
        ) -> t.Optional[exp.Expression]:
            this = super()._parse_user_defined_function(kind=kind)

            if (
                kind == TokenType.FUNCTION
                or isinstance(this, exp.UserDefinedFunction)
                or self._match(TokenType.ALIAS, advance=False)
            ):
                return this

            # Procedures may list parameters without parentheses; collect them here,
            # unless a WITH options clause follows immediately.
            if not self._match(TokenType.WITH, advance=False):
                expressions = self._parse_csv(self._parse_function_parameter)
            else:
                expressions = None

            return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

        def _parse_into(self) -> t.Optional[exp.Into]:
            into = super()._parse_into()

            table = isinstance(into, exp.Into) and into.find(exp.Table)
            if isinstance(table,
exp.Table):
                table_identifier = table.this
                if table_identifier.args.get("temporary"):
                    # Promote the temporary property from the Identifier to the Into expression
                    t.cast(exp.Into, into).set("temporary", True)

            return into

        def _parse_id_var(
            self,
            any_token: bool = True,
            tokens: t.Optional[t.Collection[TokenType]] = None,
        ) -> t.Optional[exp.Expression]:
            # '#name' marks a temporary object, '##name' a global temporary one.
            is_temporary = self._match(TokenType.HASH)
            is_global = is_temporary and self._match(TokenType.HASH)

            this = super()._parse_id_var(any_token=any_token, tokens=tokens)
            if this:
                if is_global:
                    this.set("global", True)
                elif is_temporary:
                    this.set("temporary", True)

            return this

        def _parse_create(self) -> exp.Create | exp.Command:
            create = super()._parse_create()

            if isinstance(create, exp.Create):
                # CREATE TABLE #tmp implies a temporary table: surface it as a property.
                table = create.this.this if isinstance(create.this, exp.Schema) else create.this
                if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
                    if not create.args.get("properties"):
                        create.set("properties", exp.Properties(expressions=[]))

                    create.args["properties"].append("expressions", exp.TemporaryProperty())

            return create

        def _parse_if(self) -> t.Optional[exp.Expression]:
            index = self._index

            # IF OBJECT_ID('...') IS NOT NULL DROP ... is the classic "drop if exists" idiom.
            if self._match_text_seq("OBJECT_ID"):
                self._parse_wrapped_csv(self._parse_string)
                if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                    return self._parse_drop(exists=True)
            self._retreat(index)

            return super()._parse_if()

        def _parse_unique(self) -> exp.UniqueColumnConstraint:
            # UNIQUE may carry a CLUSTERED/NONCLUSTERED qualifier in T-SQL.
            if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
                this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
            else:
                this = self._parse_schema(self._parse_id_var(any_token=False))

            return self.expression(exp.UniqueColumnConstraint, this=this)

        def _parse_partition(self) -> t.Optional[exp.Partition]:
if not self._match_text_seq("WITH", "(", "PARTITIONS"): 896 return None 897 898 def parse_range(): 899 low = self._parse_bitwise() 900 high = self._parse_bitwise() if self._match_text_seq("TO") else None 901 902 return ( 903 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 904 ) 905 906 partition = self.expression( 907 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 908 ) 909 910 self._match_r_paren() 911 912 return partition 913 914 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 915 var = self._parse_id_var() 916 if not var: 917 return None 918 919 self._match(TokenType.ALIAS) 920 return self.expression( 921 exp.DeclareItem, 922 this=var, 923 kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(), 924 default=self._match(TokenType.EQ) and self._parse_bitwise(), 925 ) 926 927 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 928 expression = super()._parse_alter_table_alter() 929 930 if expression is not None: 931 collation = expression.args.get("collate") 932 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 933 identifier = collation.this 934 collation.set("this", exp.Var(this=identifier.name)) 935 936 return expression 937 938 class Generator(generator.Generator): 939 LIMIT_IS_TOP = True 940 QUERY_HINTS = False 941 RETURNING_END = False 942 NVL2_SUPPORTED = False 943 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 944 LIMIT_FETCH = "FETCH" 945 COMPUTED_COLUMN_WITH_TYPE = False 946 CTE_RECURSIVE_KEYWORD_REQUIRED = False 947 ENSURE_BOOLS = True 948 NULL_ORDERING_SUPPORTED = None 949 SUPPORTS_SINGLE_ARG_CONCAT = False 950 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 951 SUPPORTS_SELECT_INTO = True 952 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 953 SUPPORTS_TO_NUMBER = False 954 SET_OP_MODIFIERS = False 955 COPY_PARAMS_EQ_REQUIRED = True 956 PARSE_JSON_NAME = None 957 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 958 ALTER_SET_WRAPPED = True 
959 ALTER_SET_TYPE = "" 960 961 EXPRESSIONS_WITHOUT_NESTED_CTES = { 962 exp.Create, 963 exp.Delete, 964 exp.Insert, 965 exp.Intersect, 966 exp.Except, 967 exp.Merge, 968 exp.Select, 969 exp.Subquery, 970 exp.Union, 971 exp.Update, 972 } 973 974 SUPPORTED_JSON_PATH_PARTS = { 975 exp.JSONPathKey, 976 exp.JSONPathRoot, 977 exp.JSONPathSubscript, 978 } 979 980 TYPE_MAPPING = { 981 **generator.Generator.TYPE_MAPPING, 982 exp.DataType.Type.BOOLEAN: "BIT", 983 exp.DataType.Type.DATETIME2: "DATETIME2", 984 exp.DataType.Type.DECIMAL: "NUMERIC", 985 exp.DataType.Type.DOUBLE: "FLOAT", 986 exp.DataType.Type.INT: "INTEGER", 987 exp.DataType.Type.ROWVERSION: "ROWVERSION", 988 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 989 exp.DataType.Type.TIMESTAMP: "DATETIME2", 990 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 991 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 992 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 993 exp.DataType.Type.UTINYINT: "TINYINT", 994 exp.DataType.Type.VARIANT: "SQL_VARIANT", 995 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 996 } 997 998 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 999 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 1000 1001 TRANSFORMS = { 1002 **generator.Generator.TRANSFORMS, 1003 exp.AnyValue: any_value_to_max_sql, 1004 exp.ArrayToString: rename_func("STRING_AGG"), 1005 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 1006 exp.Chr: rename_func("CHAR"), 1007 exp.DateAdd: date_delta_sql("DATEADD"), 1008 exp.DateDiff: date_delta_sql("DATEDIFF"), 1009 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 1010 exp.CurrentDate: rename_func("GETDATE"), 1011 exp.CurrentTimestamp: rename_func("GETDATE"), 1012 exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"), 1013 exp.DateStrToDate: datestrtodate_sql, 1014 exp.Extract: rename_func("DATEPART"), 1015 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 1016 exp.GroupConcat: _string_agg_sql, 1017 exp.If: rename_func("IIF"), 1018 exp.JSONExtract: 
_json_extract_sql, 1019 exp.JSONExtractScalar: _json_extract_sql, 1020 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1021 exp.Ln: rename_func("LOG"), 1022 exp.Max: max_or_greatest, 1023 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 1024 exp.Min: min_or_least, 1025 exp.NumberToStr: _format_sql, 1026 exp.Repeat: rename_func("REPLICATE"), 1027 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 1028 exp.Select: transforms.preprocess( 1029 [ 1030 transforms.eliminate_distinct_on, 1031 transforms.eliminate_semi_and_anti_joins, 1032 transforms.eliminate_qualify, 1033 transforms.unnest_generate_date_array_using_recursive_cte, 1034 ] 1035 ), 1036 exp.Stddev: rename_func("STDEV"), 1037 exp.StrPosition: lambda self, e: strposition_sql( 1038 self, e, func_name="CHARINDEX", supports_position=True 1039 ), 1040 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 1041 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 1042 exp.SHA2: lambda self, e: self.func( 1043 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 1044 ), 1045 exp.TemporaryProperty: lambda self, e: "", 1046 exp.TimeStrToTime: _timestrtotime_sql, 1047 exp.TimeToStr: _format_sql, 1048 exp.Trim: trim_sql, 1049 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 1050 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 1051 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 1052 exp.Uuid: lambda *_: "NEWID()", 1053 exp.DateFromParts: rename_func("DATEFROMPARTS"), 1054 } 1055 1056 TRANSFORMS.pop(exp.ReturnsProperty) 1057 1058 PROPERTIES_LOCATION = { 1059 **generator.Generator.PROPERTIES_LOCATION, 1060 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 1061 } 1062 1063 def scope_resolution(self, rhs: str, scope_name: str) -> str: 1064 return f"{scope_name}::{rhs}" 1065 1066 def select_sql(self, expression: exp.Select) -> str: 1067 limit = expression.args.get("limit") 1068 
offset = expression.args.get("offset") 1069 1070 if isinstance(limit, exp.Fetch) and not offset: 1071 # Dialects like Oracle can FETCH directly from a row set but 1072 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1073 offset = exp.Offset(expression=exp.Literal.number(0)) 1074 expression.set("offset", offset) 1075 1076 if offset: 1077 if not expression.args.get("order"): 1078 # ORDER BY is required in order to use OFFSET in a query, so we use 1079 # a noop order by, since we don't really care about the order. 1080 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1081 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1082 1083 if isinstance(limit, exp.Limit): 1084 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1085 # we replace here because otherwise TOP would be generated in select_sql 1086 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1087 1088 return super().select_sql(expression) 1089 1090 def convert_sql(self, expression: exp.Convert) -> str: 1091 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1092 return self.func( 1093 name, expression.this, expression.expression, expression.args.get("style") 1094 ) 1095 1096 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1097 option = self.sql(expression, "this") 1098 value = self.sql(expression, "expression") 1099 if value: 1100 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1101 return f"{option} {optional_equal_sign}{value}" 1102 return option 1103 1104 def lateral_op(self, expression: exp.Lateral) -> str: 1105 cross_apply = expression.args.get("cross_apply") 1106 if cross_apply is True: 1107 return "CROSS APPLY" 1108 if cross_apply is False: 1109 return "OUTER APPLY" 1110 1111 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1112 self.unsupported("LATERAL clause is not supported.") 1113 return "LATERAL" 1114 1115 def 
splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1116 this = expression.this 1117 split_count = len(this.name.split(".")) 1118 delimiter = expression.args.get("delimiter") 1119 part_index = expression.args.get("part_index") 1120 1121 if ( 1122 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1123 or (delimiter and delimiter.name != ".") 1124 or not part_index 1125 or split_count > 4 1126 ): 1127 self.unsupported( 1128 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1129 ) 1130 return "" 1131 1132 return self.func( 1133 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1134 ) 1135 1136 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1137 nano = expression.args.get("nano") 1138 if nano is not None: 1139 nano.pop() 1140 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1141 1142 if expression.args.get("fractions") is None: 1143 expression.set("fractions", exp.Literal.number(0)) 1144 if expression.args.get("precision") is None: 1145 expression.set("precision", exp.Literal.number(0)) 1146 1147 return rename_func("TIMEFROMPARTS")(self, expression) 1148 1149 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1150 zone = expression.args.get("zone") 1151 if zone is not None: 1152 zone.pop() 1153 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1154 1155 nano = expression.args.get("nano") 1156 if nano is not None: 1157 nano.pop() 1158 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1159 1160 if expression.args.get("milli") is None: 1161 expression.set("milli", exp.Literal.number(0)) 1162 1163 return rename_func("DATETIMEFROMPARTS")(self, expression) 1164 1165 def setitem_sql(self, expression: exp.SetItem) -> str: 1166 this = expression.this 1167 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1168 # T-SQL does 
not use '=' in SET command, except when the LHS is a variable. 1169 return f"{self.sql(this.left)} {self.sql(this.right)}" 1170 1171 return super().setitem_sql(expression) 1172 1173 def boolean_sql(self, expression: exp.Boolean) -> str: 1174 if type(expression.parent) in BIT_TYPES or isinstance( 1175 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1176 ): 1177 return "1" if expression.this else "0" 1178 1179 return "(1 = 1)" if expression.this else "(1 = 0)" 1180 1181 def is_sql(self, expression: exp.Is) -> str: 1182 if isinstance(expression.expression, exp.Boolean): 1183 return self.binary(expression, "=") 1184 return self.binary(expression, "IS") 1185 1186 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1187 sql = self.sql(expression, "this") 1188 properties = expression.args.get("properties") 1189 1190 if sql[:1] != "#" and any( 1191 isinstance(prop, exp.TemporaryProperty) 1192 for prop in (properties.expressions if properties else []) 1193 ): 1194 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1195 1196 return sql 1197 1198 def create_sql(self, expression: exp.Create) -> str: 1199 kind = expression.kind 1200 exists = expression.args.pop("exists", None) 1201 1202 like_property = expression.find(exp.LikeProperty) 1203 if like_property: 1204 ctas_expression = like_property.this 1205 else: 1206 ctas_expression = expression.expression 1207 1208 if kind == "VIEW": 1209 expression.this.set("catalog", None) 1210 with_ = expression.args.get("with") 1211 if ctas_expression and with_: 1212 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1213 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1214 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1215 ctas_expression.set("with", with_.pop()) 1216 1217 table = expression.find(exp.Table) 1218 1219 # Convert CTAS statement to SELECT .. INTO .. 
1220 if kind == "TABLE" and ctas_expression: 1221 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1222 ctas_expression = ctas_expression.subquery() 1223 1224 properties = expression.args.get("properties") or exp.Properties() 1225 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1226 1227 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1228 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1229 1230 if like_property: 1231 select_into.limit(0, copy=False) 1232 1233 sql = self.sql(select_into) 1234 else: 1235 sql = super().create_sql(expression) 1236 1237 if exists: 1238 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1239 sql_with_ctes = self.prepend_ctes(expression, sql) 1240 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1241 if kind == "SCHEMA": 1242 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1243 elif kind == "TABLE": 1244 assert table 1245 where = exp.and_( 1246 exp.column("TABLE_NAME").eq(table.name), 1247 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1248 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1249 ) 1250 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1251 elif kind == "INDEX": 1252 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1253 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1254 elif expression.args.get("replace"): 1255 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1256 1257 return self.prepend_ctes(expression, sql) 1258 1259 @generator.unsupported_args("unlogged", "expressions") 1260 def into_sql(self, expression: exp.Into) -> str: 1261 if expression.args.get("temporary"): 1262 # If the Into expression 
has a temporary property, push this down to the Identifier 1263 table = expression.find(exp.Table) 1264 if table and isinstance(table.this, exp.Identifier): 1265 table.this.set("temporary", True) 1266 1267 return f"{self.seg('INTO')} {self.sql(expression, 'this')}" 1268 1269 def count_sql(self, expression: exp.Count) -> str: 1270 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1271 return rename_func(func_name)(self, expression) 1272 1273 def offset_sql(self, expression: exp.Offset) -> str: 1274 return f"{super().offset_sql(expression)} ROWS" 1275 1276 def version_sql(self, expression: exp.Version) -> str: 1277 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1278 this = f"FOR {name}" 1279 expr = expression.expression 1280 kind = expression.text("kind") 1281 if kind in ("FROM", "BETWEEN"): 1282 args = expr.expressions 1283 sep = "TO" if kind == "FROM" else "AND" 1284 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1285 else: 1286 expr_sql = self.sql(expr) 1287 1288 expr_sql = f" {expr_sql}" if expr_sql else "" 1289 return f"{this} {kind}{expr_sql}" 1290 1291 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1292 table = expression.args.get("table") 1293 table = f"{table} " if table else "" 1294 return f"RETURNS {table}{self.sql(expression, 'this')}" 1295 1296 def returning_sql(self, expression: exp.Returning) -> str: 1297 into = self.sql(expression, "into") 1298 into = self.seg(f"INTO {into}") if into else "" 1299 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1300 1301 def transaction_sql(self, expression: exp.Transaction) -> str: 1302 this = self.sql(expression, "this") 1303 this = f" {this}" if this else "" 1304 mark = self.sql(expression, "mark") 1305 mark = f" WITH MARK {mark}" if mark else "" 1306 return f"BEGIN TRANSACTION{this}{mark}" 1307 1308 def commit_sql(self, expression: exp.Commit) -> str: 1309 this = 
self.sql(expression, "this") 1310 this = f" {this}" if this else "" 1311 durability = expression.args.get("durability") 1312 durability = ( 1313 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1314 if durability is not None 1315 else "" 1316 ) 1317 return f"COMMIT TRANSACTION{this}{durability}" 1318 1319 def rollback_sql(self, expression: exp.Rollback) -> str: 1320 this = self.sql(expression, "this") 1321 this = f" {this}" if this else "" 1322 return f"ROLLBACK TRANSACTION{this}" 1323 1324 def identifier_sql(self, expression: exp.Identifier) -> str: 1325 identifier = super().identifier_sql(expression) 1326 1327 if expression.args.get("global"): 1328 identifier = f"##{identifier}" 1329 elif expression.args.get("temporary"): 1330 identifier = f"#{identifier}" 1331 1332 return identifier 1333 1334 def constraint_sql(self, expression: exp.Constraint) -> str: 1335 this = self.sql(expression, "this") 1336 expressions = self.expressions(expression, flat=True, sep=" ") 1337 return f"CONSTRAINT {this} {expressions}" 1338 1339 def length_sql(self, expression: exp.Length) -> str: 1340 return self._uncast_text(expression, "LEN") 1341 1342 def right_sql(self, expression: exp.Right) -> str: 1343 return self._uncast_text(expression, "RIGHT") 1344 1345 def left_sql(self, expression: exp.Left) -> str: 1346 return self._uncast_text(expression, "LEFT") 1347 1348 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1349 this = expression.this 1350 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1351 this_sql = self.sql(this, "this") 1352 else: 1353 this_sql = self.sql(this) 1354 expression_sql = self.sql(expression, "expression") 1355 return self.func(name, this_sql, expression_sql if expression_sql else None) 1356 1357 def partition_sql(self, expression: exp.Partition) -> str: 1358 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1359 1360 def alter_sql(self, expression: exp.Alter) -> str: 1361 action = 
seq_get(expression.args.get("actions") or [], 0) 1362 if isinstance(action, exp.AlterRename): 1363 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1364 return super().alter_sql(expression) 1365 1366 def drop_sql(self, expression: exp.Drop) -> str: 1367 if expression.args["kind"] == "VIEW": 1368 expression.this.set("catalog", None) 1369 return super().drop_sql(expression) 1370 1371 def options_modifier(self, expression: exp.Expression) -> str: 1372 options = self.expressions(expression, key="options") 1373 return f" OPTION{self.wrap(options)}" if options else "" 1374 1375 def dpipe_sql(self, expression: exp.DPipe) -> str: 1376 return self.sql( 1377 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1378 ) 1379 1380 def isascii_sql(self, expression: exp.IsAscii) -> str: 1381 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1382 1383 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1384 this = super().columndef_sql(expression, sep) 1385 default = self.sql(expression, "default") 1386 default = f" = {default}" if default else "" 1387 output = self.sql(expression, "output") 1388 output = f" {output}" if output else "" 1389 return f"{this}{default}{output}" 1390 1391 def coalesce_sql(self, expression: exp.Coalesce) -> str: 1392 func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE" 1393 return rename_func(func_name)(self, expression)
Whether the base argument comes first in the LOG function.
Possible values: True, False, or None (None means the two-argument form of LOG is not supported).
Whether the behavior of a / b depends on the types of a and b.
False means a / b is always float division; True means a / b is integer division when both a and b are integers.
By default, a NULL argument in CONCAT yields NULL, but in some dialects it yields an empty string instead.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python strftime
formats.
521 class Tokenizer(tokens.Tokenizer): 522 IDENTIFIERS = [("[", "]"), '"'] 523 QUOTES = ["'", '"'] 524 HEX_STRINGS = [("0x", ""), ("0X", "")] 525 VAR_SINGLE_TOKENS = {"@", "$", "#"} 526 527 KEYWORDS = { 528 **tokens.Tokenizer.KEYWORDS, 529 "CLUSTERED INDEX": TokenType.INDEX, 530 "DATETIME2": TokenType.DATETIME2, 531 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 532 "DECLARE": TokenType.DECLARE, 533 "EXEC": TokenType.COMMAND, 534 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 535 "GO": TokenType.COMMAND, 536 "IMAGE": TokenType.IMAGE, 537 "MONEY": TokenType.MONEY, 538 "NONCLUSTERED INDEX": TokenType.INDEX, 539 "NTEXT": TokenType.TEXT, 540 "OPTION": TokenType.OPTION, 541 "OUTPUT": TokenType.RETURNING, 542 "PRINT": TokenType.COMMAND, 543 "PROC": TokenType.PROCEDURE, 544 "REAL": TokenType.FLOAT, 545 "ROWVERSION": TokenType.ROWVERSION, 546 "SMALLDATETIME": TokenType.SMALLDATETIME, 547 "SMALLMONEY": TokenType.SMALLMONEY, 548 "SQL_VARIANT": TokenType.VARIANT, 549 "SYSTEM_USER": TokenType.CURRENT_USER, 550 "TOP": TokenType.TOP, 551 "TIMESTAMP": TokenType.ROWVERSION, 552 "TINYINT": TokenType.UTINYINT, 553 "UNIQUEIDENTIFIER": TokenType.UUID, 554 "UPDATE STATISTICS": TokenType.COMMAND, 555 "XML": TokenType.XML, 556 } 557 KEYWORDS.pop("/*+") 558 559 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- use_rs_tokenizer
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
561 class Parser(parser.Parser): 562 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 563 LOG_DEFAULTS_TO_LN = True 564 STRING_ALIASES = True 565 NO_PAREN_IF_COMMANDS = False 566 567 QUERY_MODIFIER_PARSERS = { 568 **parser.Parser.QUERY_MODIFIER_PARSERS, 569 TokenType.OPTION: lambda self: ("options", self._parse_options()), 570 TokenType.FOR: lambda self: ("for", self._parse_for()), 571 } 572 573 # T-SQL does not allow BEGIN to be used as an identifier 574 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 575 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 576 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 577 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 578 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 579 580 FUNCTIONS = { 581 **parser.Parser.FUNCTIONS, 582 "CHARINDEX": lambda args: exp.StrPosition( 583 this=seq_get(args, 1), 584 substr=seq_get(args, 0), 585 position=seq_get(args, 2), 586 ), 587 "COUNT": lambda args: exp.Count( 588 this=seq_get(args, 0), expressions=args[1:], big_int=False 589 ), 590 "COUNT_BIG": lambda args: exp.Count( 591 this=seq_get(args, 0), expressions=args[1:], big_int=True 592 ), 593 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 594 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 595 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 596 "DATEPART": _build_formatted_time(exp.TimeToStr), 597 "DATETIMEFROMPARTS": _build_datetimefromparts, 598 "EOMONTH": _build_eomonth, 599 "FORMAT": _build_format, 600 "GETDATE": exp.CurrentTimestamp.from_arg_list, 601 "HASHBYTES": _build_hashbytes, 602 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 603 "JSON_QUERY": _build_json_query, 604 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 605 "LEN": _build_with_arg_as_text(exp.Length), 606 "LEFT": 
_build_with_arg_as_text(exp.Left), 607 "NEWID": exp.Uuid.from_arg_list, 608 "RIGHT": _build_with_arg_as_text(exp.Right), 609 "PARSENAME": _build_parsename, 610 "REPLICATE": exp.Repeat.from_arg_list, 611 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 612 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 613 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 614 "SUSER_NAME": exp.CurrentUser.from_arg_list, 615 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 616 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 617 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 618 "TIMEFROMPARTS": _build_timefromparts, 619 "DATETRUNC": _build_datetrunc, 620 } 621 622 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 623 624 PROCEDURE_OPTIONS = dict.fromkeys( 625 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 626 ) 627 628 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 629 630 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 631 TokenType.TABLE, 632 *parser.Parser.TYPE_TOKENS, 633 } 634 635 STATEMENT_PARSERS = { 636 **parser.Parser.STATEMENT_PARSERS, 637 TokenType.DECLARE: lambda self: self._parse_declare(), 638 } 639 640 RANGE_PARSERS = { 641 **parser.Parser.RANGE_PARSERS, 642 TokenType.DCOLON: lambda self, this: self.expression( 643 exp.ScopeResolution, 644 this=this, 645 expression=self._parse_function() or self._parse_var(any_token=True), 646 ), 647 } 648 649 NO_PAREN_FUNCTION_PARSERS = { 650 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 651 "NEXT": lambda self: self._parse_next_value_for(), 652 } 653 654 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 655 COLUMN_OPERATORS = { 656 **parser.Parser.COLUMN_OPERATORS, 657 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 658 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 659 else self.expression(exp.ScopeResolution, this=this, 
expression=to), 660 } 661 662 def _parse_alter_table_set(self) -> exp.AlterSet: 663 return self._parse_wrapped(super()._parse_alter_table_set) 664 665 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 666 if self._match(TokenType.MERGE): 667 comments = self._prev_comments 668 merge = self._parse_merge() 669 merge.add_comments(comments, prepend=True) 670 return merge 671 672 return super()._parse_wrapped_select(table=table) 673 674 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 675 # We want to use _parse_types() if the first token after :: is a known type, 676 # otherwise we could parse something like x::varchar(max) into a function 677 if self._match_set(self.TYPE_TOKENS, advance=False): 678 return self._parse_types() 679 680 return self._parse_function() or self._parse_types() 681 682 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 683 if not self._match(TokenType.OPTION): 684 return None 685 686 def _parse_option() -> t.Optional[exp.Expression]: 687 option = self._parse_var_from_options(OPTIONS) 688 if not option: 689 return None 690 691 self._match(TokenType.EQ) 692 return self.expression( 693 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 694 ) 695 696 return self._parse_wrapped_csv(_parse_option) 697 698 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 699 this = self._parse_primary_or_var() 700 if self._match(TokenType.L_PAREN, advance=False): 701 expression = self._parse_wrapped(self._parse_string) 702 else: 703 expression = None 704 705 return exp.XMLKeyValueOption(this=this, expression=expression) 706 707 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 708 if not self._match_pair(TokenType.FOR, TokenType.XML): 709 return None 710 711 def _parse_for_xml() -> t.Optional[exp.Expression]: 712 return self.expression( 713 exp.QueryOption, 714 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 715 or self._parse_xml_key_value_option(), 
716 ) 717 718 return self._parse_csv(_parse_for_xml) 719 720 def _parse_projections(self) -> t.List[exp.Expression]: 721 """ 722 T-SQL supports the syntax alias = expression in the SELECT's projection list, 723 so we transform all parsed Selects to convert their EQ projections into Aliases. 724 725 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 726 """ 727 return [ 728 ( 729 exp.alias_(projection.expression, projection.this.this, copy=False) 730 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 731 else projection 732 ) 733 for projection in super()._parse_projections() 734 ] 735 736 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 737 """Applies to SQL Server and Azure SQL Database 738 COMMIT [ { TRAN | TRANSACTION } 739 [ transaction_name | @tran_name_variable ] ] 740 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 741 742 ROLLBACK { TRAN | TRANSACTION } 743 [ transaction_name | @tran_name_variable 744 | savepoint_name | @savepoint_variable ] 745 """ 746 rollback = self._prev.token_type == TokenType.ROLLBACK 747 748 self._match_texts(("TRAN", "TRANSACTION")) 749 this = self._parse_id_var() 750 751 if rollback: 752 return self.expression(exp.Rollback, this=this) 753 754 durability = None 755 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 756 self._match_text_seq("DELAYED_DURABILITY") 757 self._match(TokenType.EQ) 758 759 if self._match_text_seq("OFF"): 760 durability = False 761 else: 762 self._match(TokenType.ON) 763 durability = True 764 765 self._match_r_paren() 766 767 return self.expression(exp.Commit, this=this, durability=durability) 768 769 def _parse_transaction(self) -> exp.Transaction | exp.Command: 770 """Applies to SQL Server and Azure SQL Database 771 BEGIN { TRAN | TRANSACTION } 772 [ { transaction_name | @tran_name_variable } 773 [ WITH MARK [ 'description' ] ] 774 ] 775 """ 776 if self._match_texts(("TRAN", "TRANSACTION")): 777 
        transaction = self.expression(exp.Transaction, this=self._parse_id_var())
        if self._match_text_seq("WITH", "MARK"):
            # BEGIN TRANSACTION <name> WITH MARK 'description'
            transaction.set("mark", self._parse_string())

        return transaction

    return self._parse_as_command(self._prev)

def _parse_returns(self) -> exp.ReturnsProperty:
    """Parse a RETURNS clause, capturing the optional table-variable name.

    T-SQL table-valued functions can declare ``RETURNS @tbl TABLE (...)``;
    the identifier is stored under the ``table`` arg of the ReturnsProperty.
    """
    table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
    returns = super()._parse_returns()
    returns.set("table", table)
    return returns

def _parse_convert(
    self, strict: bool, safe: t.Optional[bool] = None
) -> t.Optional[exp.Expression]:
    """Parse the argument list of CONVERT/TRY_CONVERT: (<type>, <expr>, ...).

    ``safe`` marks TRY_CONVERT (see the generator's convert_sql), ``strict``
    records whether the conversion should error on failure.
    """
    this = self._parse_types()
    self._match(TokenType.COMMA)
    args = [this, *self._parse_csv(self._parse_assignment)]
    convert = exp.Convert.from_arg_list(args)
    convert.set("safe", safe)
    convert.set("strict", strict)
    return convert

def _parse_column_def(
    self, this: t.Optional[exp.Expression], computed_column: bool = True
) -> t.Optional[exp.Expression]:
    # Extend the base column definition with T-SQL specifics: an
    # "= <expr>" default and a trailing column-definition mode keyword
    # (one of COLUMN_DEFINITION_MODES), stored under "output".
    this = super()._parse_column_def(this=this, computed_column=computed_column)
    if not this:
        return None
    if self._match(TokenType.EQ):
        this.set("default", self._parse_disjunction())
    if self._match_texts(self.COLUMN_DEFINITION_MODES):
        this.set("output", self._prev.text)
    return this

def _parse_user_defined_function(
    self, kind: t.Optional[TokenType] = None
) -> t.Optional[exp.Expression]:
    this = super()._parse_user_defined_function(kind=kind)

    # Functions are fully handled by the base parser; otherwise (e.g.
    # procedures) the parameters may follow without parentheses and are
    # collected below -- TODO confirm which CREATE kinds reach this path.
    if (
        kind == TokenType.FUNCTION
        or isinstance(this, exp.UserDefinedFunction)
        or self._match(TokenType.ALIAS, advance=False)
    ):
        return this

    if not self._match(TokenType.WITH, advance=False):
        expressions = self._parse_csv(self._parse_function_parameter)
    else:
        expressions = None

    return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

def _parse_into(self) -> t.Optional[exp.Into]:
    into = super()._parse_into()

    table = isinstance(into, exp.Into) and into.find(exp.Table)
    if isinstance(table, exp.Table):
        table_identifier = table.this
        if table_identifier.args.get("temporary"):
            # Promote the temporary property from the Identifier to the Into expression
            t.cast(exp.Into, into).set("temporary", True)

    return into

def _parse_id_var(
    self,
    any_token: bool = True,
    tokens: t.Optional[t.Collection[TokenType]] = None,
) -> t.Optional[exp.Expression]:
    # "#name" marks a local temporary object and "##name" a global one;
    # both flags are recorded on the parsed identifier.
    is_temporary = self._match(TokenType.HASH)
    is_global = is_temporary and self._match(TokenType.HASH)

    this = super()._parse_id_var(any_token=any_token, tokens=tokens)
    if this:
        if is_global:
            this.set("global", True)
        elif is_temporary:
            this.set("temporary", True)

    return this

def _parse_create(self) -> exp.Create | exp.Command:
    create = super()._parse_create()

    if isinstance(create, exp.Create):
        # CREATE TABLE #tmp (...) -> surface the identifier-level
        # "temporary" flag as a TemporaryProperty on the Create node.
        table = create.this.this if isinstance(create.this, exp.Schema) else create.this
        if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
            if not create.args.get("properties"):
                create.set("properties", exp.Properties(expressions=[]))

            create.args["properties"].append("expressions", exp.TemporaryProperty())

    return create

def _parse_if(self) -> t.Optional[exp.Expression]:
    index = self._index

    # "IF OBJECT_ID('x') IS NOT NULL DROP ..." is the idiomatic T-SQL
    # drop-if-exists; parse it as DROP ... with exists=True.
    if self._match_text_seq("OBJECT_ID"):
        self._parse_wrapped_csv(self._parse_string)
        if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
            return self._parse_drop(exists=True)

    self._retreat(index)
    return super()._parse_if()

def _parse_unique(self) -> exp.UniqueColumnConstraint:
    # UNIQUE [CLUSTERED | NONCLUSTERED] (<columns>)
    if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
        this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
    else:
        this = self._parse_schema(self._parse_id_var(any_token=False))

    return self.expression(exp.UniqueColumnConstraint, this=this)

def _parse_partition(self) -> t.Optional[exp.Partition]:
    # WITH (PARTITIONS (<n> [TO <m>], ...))
    if not self._match_text_seq("WITH", "(", "PARTITIONS"):
        return None

    def parse_range():
        # A partition item is either a single number or "<low> TO <high>".
        low = self._parse_bitwise()
        high = self._parse_bitwise() if self._match_text_seq("TO") else None

        return (
            self.expression(exp.PartitionRange, this=low, expression=high) if high else low
        )

    partition = self.expression(
        exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
    )

    self._match_r_paren()

    return partition

def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
    # DECLARE @var [AS] {<type> | TABLE (<schema>)} [= <default>]
    var = self._parse_id_var()
    if not var:
        return None

    self._match(TokenType.ALIAS)
    return self.expression(
        exp.DeclareItem,
        this=var,
        kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(),
        default=self._match(TokenType.EQ) and self._parse_bitwise(),
    )

def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]:
    expression = super()._parse_alter_table_alter()

    if expression is not None:
        collation = expression.args.get("collate")
        if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier):
            # A COLLATE argument is a bare collation name, not a column;
            # rewrap the parsed identifier as a Var so it round-trips.
            identifier = collation.this
            collation.set("this", exp.Var(this=identifier.name))

    return expression
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- COLON_PLACEHOLDER_TOKENS
- ARRAY_CONSTRUCTORS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- CAST_COLUMN_OPERATORS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PIPE_SYNTAX_TRANSFORM_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- FUNCTION_PARSERS
- QUERY_MODIFIER_TOKENS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- WINDOW_EXCLUDE_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- NULL_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ODBC_DATETIME_LITERALS
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- ANALYZE_STYLES
- ANALYZE_EXPRESSION_PARSERS
- PARTITION_KEYWORDS
- AMBIGUOUS_ALIAS_TOKENS
- OPERATION_MODIFIERS
- RECURSIVE_CTE_SEARCH_KIND
- MODIFIABLES
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- SET_OP_MODIFIERS
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- OPTIONAL_ALIAS_TOKEN_CTE
- ALTER_RENAME_REQUIRES_COLUMN
- JOINS_HAVE_EQUAL_PRECEDENCE
- ZONE_AWARE_TIMESTAMP_CONSTRUCTOR
- MAP_KEYS_ARE_ARBITRARY_EXPRESSIONS
- JSON_EXTRACT_REQUIRES_JSON_EXPRESSION
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- parse_set_operation
- build_cast
- errors
- sql
class Generator(generator.Generator):
    """Generates T-SQL (Microsoft SQL Server) from a sqlglot syntax tree."""

    LIMIT_IS_TOP = True
    QUERY_HINTS = False
    RETURNING_END = False
    NVL2_SUPPORTED = False
    ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
    LIMIT_FETCH = "FETCH"
    COMPUTED_COLUMN_WITH_TYPE = False
    CTE_RECURSIVE_KEYWORD_REQUIRED = False
    # T-SQL has no true boolean literals in most expression positions; see
    # boolean_sql below for how TRUE/FALSE are rendered.
    ENSURE_BOOLS = True
    NULL_ORDERING_SUPPORTED = None
    SUPPORTS_SINGLE_ARG_CONCAT = False
    TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
    SUPPORTS_SELECT_INTO = True
    JSON_PATH_BRACKETED_KEY_SUPPORTED = False
    SUPPORTS_TO_NUMBER = False
    SET_OP_MODIFIERS = False
    COPY_PARAMS_EQ_REQUIRED = True
    PARSE_JSON_NAME = None
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
    ALTER_SET_WRAPPED = True
    ALTER_SET_TYPE = ""

    # Statements whose nested CTEs must be bubbled up to the top level.
    EXPRESSIONS_WITHOUT_NESTED_CTES = {
        exp.Create,
        exp.Delete,
        exp.Insert,
        exp.Intersect,
        exp.Except,
        exp.Merge,
        exp.Select,
        exp.Subquery,
        exp.Union,
        exp.Update,
    }

    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BOOLEAN: "BIT",
        exp.DataType.Type.DATETIME2: "DATETIME2",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DOUBLE: "FLOAT",
        exp.DataType.Type.INT: "INTEGER",
        exp.DataType.Type.ROWVERSION: "ROWVERSION",
        exp.DataType.Type.TEXT: "VARCHAR(MAX)",
        exp.DataType.Type.TIMESTAMP: "DATETIME2",
        exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
        exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
        exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
        exp.DataType.Type.UTINYINT: "TINYINT",
        exp.DataType.Type.VARIANT: "SQL_VARIANT",
        exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
    }

    # NCHAR/NVARCHAR are native T-SQL types, so drop the base-class remapping.
    TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
    TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.AnyValue: any_value_to_max_sql,
        exp.ArrayToString: rename_func("STRING_AGG"),
        exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
        exp.Chr: rename_func("CHAR"),
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.DateDiff: date_delta_sql("DATEDIFF"),
        exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
        exp.CurrentDate: rename_func("GETDATE"),
        exp.CurrentTimestamp: rename_func("GETDATE"),
        exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.Extract: rename_func("DATEPART"),
        exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
        exp.GroupConcat: _string_agg_sql,
        exp.If: rename_func("IIF"),
        exp.JSONExtract: _json_extract_sql,
        exp.JSONExtractScalar: _json_extract_sql,
        exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
        exp.Ln: rename_func("LOG"),
        exp.Max: max_or_greatest,
        exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
        exp.Min: min_or_least,
        exp.NumberToStr: _format_sql,
        exp.Repeat: rename_func("REPLICATE"),
        exp.CurrentSchema: rename_func("SCHEMA_NAME"),
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_distinct_on,
                transforms.eliminate_semi_and_anti_joins,
                transforms.eliminate_qualify,
                transforms.unnest_generate_date_array_using_recursive_cte,
            ]
        ),
        exp.Stddev: rename_func("STDEV"),
        exp.StrPosition: lambda self, e: strposition_sql(
            self, e, func_name="CHARINDEX", supports_position=True
        ),
        exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
        exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
        exp.SHA2: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
        ),
        exp.TemporaryProperty: lambda self, e: "",
        exp.TimeStrToTime: _timestrtotime_sql,
        exp.TimeToStr: _format_sql,
        exp.Trim: trim_sql,
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
        exp.Uuid: lambda *_: "NEWID()",
        exp.DateFromParts: rename_func("DATEFROMPARTS"),
    }

    # RETURNS needs dialect-specific handling (see returnsproperty_sql).
    TRANSFORMS.pop(exp.ReturnsProperty)

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    def scope_resolution(self, rhs: str, scope_name: str) -> str:
        # T-SQL scope resolution operator, e.g. hierarchyid::GetRoot().
        return f"{scope_name}::{rhs}"

    def select_sql(self, expression: exp.Select) -> str:
        limit = expression.args.get("limit")
        offset = expression.args.get("offset")

        if isinstance(limit, exp.Fetch) and not offset:
            # Dialects like Oracle can FETCH directly from a row set but
            # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
            offset = exp.Offset(expression=exp.Literal.number(0))
            expression.set("offset", offset)

        if offset:
            if not expression.args.get("order"):
                # ORDER BY is required in order to use OFFSET in a query, so we use
                # a noop order by, since we don't really care about the order.
                # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                # we replace here because otherwise TOP would be generated in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

        return super().select_sql(expression)

    def convert_sql(self, expression: exp.Convert) -> str:
        # "safe" was set by the parser for TRY_CONVERT.
        name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
        return self.func(
            name, expression.this, expression.expression, expression.args.get("style")
        )

    def queryoption_sql(self, expression: exp.QueryOption) -> str:
        # Renders a single entry of an OPTION(...) hint clause; some options
        # require an '=' between name and value.
        option = self.sql(expression, "this")
        value = self.sql(expression, "expression")
        if value:
            optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
            return f"{option} {optional_equal_sign}{value}"
        return option

    def lateral_op(self, expression: exp.Lateral) -> str:
        cross_apply = expression.args.get("cross_apply")
        if cross_apply is True:
            return "CROSS APPLY"
        if cross_apply is False:
            return "OUTER APPLY"

        # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
        self.unsupported("LATERAL clause is not supported.")
        return "LATERAL"

    def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
        # SPLIT_PART can only be emulated with PARSENAME, which splits on '.'
        # (at most 4 parts) and counts from the right -- hence the index flip.
        this = expression.this
        split_count = len(this.name.split("."))
        delimiter = expression.args.get("delimiter")
        part_index = expression.args.get("part_index")

        if (
            not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
            or (delimiter and delimiter.name != ".")
            or not part_index
            or split_count > 4
        ):
            self.unsupported(
                "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
            )
            return ""

        return self.func(
            "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
        )

    def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

        # TIMEFROMPARTS requires all five arguments, so default the optional ones.
        if expression.args.get("fractions") is None:
            expression.set("fractions", exp.Literal.number(0))
        if expression.args.get("precision") is None:
            expression.set("precision", exp.Literal.number(0))

        return rename_func("TIMEFROMPARTS")(self, expression)

    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
        zone = expression.args.get("zone")
        if zone is not None:
            zone.pop()
            self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

        # DATETIMEFROMPARTS requires the milliseconds argument.
        if expression.args.get("milli") is None:
            expression.set("milli", exp.Literal.number(0))

        return rename_func("DATETIMEFROMPARTS")(self, expression)

    def setitem_sql(self, expression: exp.SetItem) -> str:
        this = expression.this
        if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
            # T-SQL does not use '=' in SET command, except when the LHS is a variable.
            return f"{self.sql(this.left)} {self.sql(this.right)}"

        return super().setitem_sql(expression)

    def boolean_sql(self, expression: exp.Boolean) -> str:
        # In bit-valued positions (comparisons, projections, VALUES rows)
        # render 1/0; elsewhere fall back to a tautology/contradiction since
        # T-SQL has no standalone boolean literals.
        if type(expression.parent) in BIT_TYPES or isinstance(
            expression.find_ancestor(exp.Values, exp.Select), exp.Values
        ):
            return "1" if expression.this else "0"

        return "(1 = 1)" if expression.this else "(1 = 0)"

    def is_sql(self, expression: exp.Is) -> str:
        # "x IS TRUE" has no T-SQL equivalent; compare against the bit instead.
        if isinstance(expression.expression, exp.Boolean):
            return self.binary(expression, "=")
        return self.binary(expression, "IS")

    def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
        sql = self.sql(expression, "this")
        properties = expression.args.get("properties")

        # Temporary tables must be spelled with a leading '#'; insert it
        # inside the bracket quoting if present.
        if sql[:1] != "#" and any(
            isinstance(prop, exp.TemporaryProperty)
            for prop in (properties.expressions if properties else [])
        ):
            sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

        return sql

    def create_sql(self, expression: exp.Create) -> str:
        kind = expression.kind
        exists = expression.args.pop("exists", None)

        like_property = expression.find(exp.LikeProperty)
        if like_property:
            ctas_expression = like_property.this
        else:
            ctas_expression = expression.expression

        if kind == "VIEW":
            expression.this.set("catalog", None)
            with_ = expression.args.get("with")
            if ctas_expression and with_:
                # We've already preprocessed the Create expression to bubble up any nested CTEs,
                # but CREATE VIEW actually requires the WITH clause to come after it so we need
                # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                ctas_expression.set("with", with_.pop())

        table = expression.find(exp.Table)

        # Convert CTAS statement to SELECT .. INTO ..
        if kind == "TABLE" and ctas_expression:
            if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                ctas_expression = ctas_expression.subquery()

            properties = expression.args.get("properties") or exp.Properties()
            is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

            select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
            select_into.set("into", exp.Into(this=table, temporary=is_temp))

            if like_property:
                # CREATE TABLE ... LIKE copies only the schema, so select no rows.
                select_into.limit(0, copy=False)

            sql = self.sql(select_into)
        else:
            sql = super().create_sql(expression)

        if exists:
            # Emulate IF NOT EXISTS by probing the catalog views and running
            # the statement through EXEC(...) only when the object is absent.
            identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
            sql_with_ctes = self.prepend_ctes(expression, sql)
            sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
            if kind == "SCHEMA":
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
            elif kind == "TABLE":
                assert table
                where = exp.and_(
                    exp.column("TABLE_NAME").eq(table.name),
                    exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                    exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
                )
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
            elif kind == "INDEX":
                index = self.sql(exp.Literal.string(expression.this.text("this")))
                return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
        elif expression.args.get("replace"):
            sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

        return self.prepend_ctes(expression, sql)

    @generator.unsupported_args("unlogged", "expressions")
    def into_sql(self, expression: exp.Into) -> str:
        if expression.args.get("temporary"):
            # If the Into expression has a temporary property, push this down to the Identifier
            table = expression.find(exp.Table)
            if table and isinstance(table.this, exp.Identifier):
                table.this.set("temporary", True)

        return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

    def count_sql(self, expression: exp.Count) -> str:
        # COUNT_BIG returns bigint; preserve it when the parser flagged it.
        func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
        return rename_func(func_name)(self, expression)

    def offset_sql(self, expression: exp.Offset) -> str:
        # T-SQL requires the trailing ROWS keyword: OFFSET <n> ROWS.
        return f"{super().offset_sql(expression)} ROWS"

    def version_sql(self, expression: exp.Version) -> str:
        # Temporal-table query clause: FOR SYSTEM_TIME <kind> <args>.
        name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
        this = f"FOR {name}"
        expr = expression.expression
        kind = expression.text("kind")
        if kind in ("FROM", "BETWEEN"):
            args = expr.expressions
            sep = "TO" if kind == "FROM" else "AND"
            expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
        else:
            expr_sql = self.sql(expr)

        expr_sql = f" {expr_sql}" if expr_sql else ""
        return f"{this} {kind}{expr_sql}"

    def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
        # RETURNS [@table_var ]<type>, where the "table" arg holds the
        # optional table-variable name (see Parser._parse_returns).
        table = expression.args.get("table")
        table = f"{table} " if table else ""
        return f"RETURNS {table}{self.sql(expression, 'this')}"

    def returning_sql(self, expression: exp.Returning) -> str:
        # RETURNING maps to the T-SQL OUTPUT clause, optionally with INTO.
        into = self.sql(expression, "into")
        into = self.seg(f"INTO {into}") if into else ""
        return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

    def transaction_sql(self, expression: exp.Transaction) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        mark = self.sql(expression, "mark")
        mark = f" WITH MARK {mark}" if mark else ""
        return f"BEGIN TRANSACTION{this}{mark}"

    def commit_sql(self, expression: exp.Commit) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        durability = expression.args.get("durability")
        durability = (
            f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
            if durability is not None
            else ""
        )
        return f"COMMIT TRANSACTION{this}{durability}"

    def rollback_sql(self, expression: exp.Rollback) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"ROLLBACK TRANSACTION{this}"

    def identifier_sql(self, expression: exp.Identifier) -> str:
        identifier = super().identifier_sql(expression)

        # Re-attach the temp-object prefixes stripped by Parser._parse_id_var.
        if expression.args.get("global"):
            identifier = f"##{identifier}"
        elif expression.args.get("temporary"):
            identifier = f"#{identifier}"

        return identifier

    def constraint_sql(self, expression: exp.Constraint) -> str:
        this = self.sql(expression, "this")
        expressions = self.expressions(expression, flat=True, sep=" ")
        return f"CONSTRAINT {this} {expressions}"

    def length_sql(self, expression: exp.Length) -> str:
        return self._uncast_text(expression, "LEN")

    def right_sql(self, expression: exp.Right) -> str:
        return self._uncast_text(expression, "RIGHT")

    def left_sql(self, expression: exp.Left) -> str:
        return self._uncast_text(expression, "LEFT")

    def _uncast_text(self, expression: exp.Expression, name: str) -> str:
        # LEN/LEFT/RIGHT operate on strings directly, so a wrapping
        # CAST(x AS TEXT) is redundant and is dropped.
        this = expression.this
        if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
            this_sql = self.sql(this, "this")
        else:
            this_sql = self.sql(this)
        expression_sql = self.sql(expression, "expression")
        return self.func(name, this_sql, expression_sql if expression_sql else None)

    def partition_sql(self, expression: exp.Partition) -> str:
        return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

    def alter_sql(self, expression: exp.Alter) -> str:
        # ALTER ... RENAME has no direct syntax; use the sp_rename procedure.
        action = seq_get(expression.args.get("actions") or [], 0)
        if isinstance(action, exp.AlterRename):
            return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
        return super().alter_sql(expression)

    def drop_sql(self, expression: exp.Drop) -> str:
        # DROP VIEW does not accept a catalog-qualified name.
        if expression.args["kind"] == "VIEW":
            expression.this.set("catalog", None)
        return super().drop_sql(expression)

    def options_modifier(self, expression: exp.Expression) -> str:
        options = self.expressions(expression, key="options")
        return f" OPTION{self.wrap(options)}" if options else ""

    def dpipe_sql(self, expression: exp.DPipe) -> str:
        # T-SQL has no || operator; flatten the chain into + concatenation.
        return self.sql(
            reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
        )

    def isascii_sql(self, expression: exp.IsAscii) -> str:
        # The hex literal is the binary form of the pattern '%[^\x00-\x7F]%',
        # i.e. "contains any non-ASCII character"; CONVERT + binary collation
        # keeps the byte-range comparison exact. PATINDEX = 0 means all-ASCII.
        return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

    def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
        # Append the T-SQL-specific "= <default>" and output-mode suffixes
        # captured by Parser._parse_column_def.
        this = super().columndef_sql(expression, sep)
        default = self.sql(expression, "default")
        default = f" = {default}" if default else ""
        output = self.sql(expression, "output")
        output = f" {output}" if output else ""
        return f"{this}{default}{output}"

    def coalesce_sql(self, expression: exp.Coalesce) -> str:
        # Round-trip ISNULL as ISNULL instead of normalizing to COALESCE.
        func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
        return rename_func(func_name)(self, expression)
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
- normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
1066 def select_sql(self, expression: exp.Select) -> str: 1067 limit = expression.args.get("limit") 1068 offset = expression.args.get("offset") 1069 1070 if isinstance(limit, exp.Fetch) and not offset: 1071 # Dialects like Oracle can FETCH directly from a row set but 1072 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1073 offset = exp.Offset(expression=exp.Literal.number(0)) 1074 expression.set("offset", offset) 1075 1076 if offset: 1077 if not expression.args.get("order"): 1078 # ORDER BY is required in order to use OFFSET in a query, so we use 1079 # a noop order by, since we don't really care about the order. 1080 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1081 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1082 1083 if isinstance(limit, exp.Limit): 1084 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1085 # we replace here because otherwise TOP would be generated in select_sql 1086 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1087 1088 return super().select_sql(expression)
1096 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1097 option = self.sql(expression, "this") 1098 value = self.sql(expression, "expression") 1099 if value: 1100 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1101 return f"{option} {optional_equal_sign}{value}" 1102 return option
1104 def lateral_op(self, expression: exp.Lateral) -> str: 1105 cross_apply = expression.args.get("cross_apply") 1106 if cross_apply is True: 1107 return "CROSS APPLY" 1108 if cross_apply is False: 1109 return "OUTER APPLY" 1110 1111 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1112 self.unsupported("LATERAL clause is not supported.") 1113 return "LATERAL"
1115 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1116 this = expression.this 1117 split_count = len(this.name.split(".")) 1118 delimiter = expression.args.get("delimiter") 1119 part_index = expression.args.get("part_index") 1120 1121 if ( 1122 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1123 or (delimiter and delimiter.name != ".") 1124 or not part_index 1125 or split_count > 4 1126 ): 1127 self.unsupported( 1128 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1129 ) 1130 return "" 1131 1132 return self.func( 1133 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1134 )
1136 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1137 nano = expression.args.get("nano") 1138 if nano is not None: 1139 nano.pop() 1140 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1141 1142 if expression.args.get("fractions") is None: 1143 expression.set("fractions", exp.Literal.number(0)) 1144 if expression.args.get("precision") is None: 1145 expression.set("precision", exp.Literal.number(0)) 1146 1147 return rename_func("TIMEFROMPARTS")(self, expression)
1149 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1150 zone = expression.args.get("zone") 1151 if zone is not None: 1152 zone.pop() 1153 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1154 1155 nano = expression.args.get("nano") 1156 if nano is not None: 1157 nano.pop() 1158 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1159 1160 if expression.args.get("milli") is None: 1161 expression.set("milli", exp.Literal.number(0)) 1162 1163 return rename_func("DATETIMEFROMPARTS")(self, expression)
1165 def setitem_sql(self, expression: exp.SetItem) -> str: 1166 this = expression.this 1167 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1168 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1169 return f"{self.sql(this.left)} {self.sql(this.right)}" 1170 1171 return super().setitem_sql(expression)
1186 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1187 sql = self.sql(expression, "this") 1188 properties = expression.args.get("properties") 1189 1190 if sql[:1] != "#" and any( 1191 isinstance(prop, exp.TemporaryProperty) 1192 for prop in (properties.expressions if properties else []) 1193 ): 1194 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1195 1196 return sql
1198 def create_sql(self, expression: exp.Create) -> str: 1199 kind = expression.kind 1200 exists = expression.args.pop("exists", None) 1201 1202 like_property = expression.find(exp.LikeProperty) 1203 if like_property: 1204 ctas_expression = like_property.this 1205 else: 1206 ctas_expression = expression.expression 1207 1208 if kind == "VIEW": 1209 expression.this.set("catalog", None) 1210 with_ = expression.args.get("with") 1211 if ctas_expression and with_: 1212 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1213 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1214 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1215 ctas_expression.set("with", with_.pop()) 1216 1217 table = expression.find(exp.Table) 1218 1219 # Convert CTAS statement to SELECT .. INTO .. 1220 if kind == "TABLE" and ctas_expression: 1221 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1222 ctas_expression = ctas_expression.subquery() 1223 1224 properties = expression.args.get("properties") or exp.Properties() 1225 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1226 1227 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1228 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1229 1230 if like_property: 1231 select_into.limit(0, copy=False) 1232 1233 sql = self.sql(select_into) 1234 else: 1235 sql = super().create_sql(expression) 1236 1237 if exists: 1238 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1239 sql_with_ctes = self.prepend_ctes(expression, sql) 1240 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1241 if kind == "SCHEMA": 1242 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1243 elif kind == "TABLE": 1244 assert table 1245 where = exp.and_( 1246 
exp.column("TABLE_NAME").eq(table.name), 1247 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1248 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1249 ) 1250 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1251 elif kind == "INDEX": 1252 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1253 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1254 elif expression.args.get("replace"): 1255 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1256 1257 return self.prepend_ctes(expression, sql)
1259 @generator.unsupported_args("unlogged", "expressions") 1260 def into_sql(self, expression: exp.Into) -> str: 1261 if expression.args.get("temporary"): 1262 # If the Into expression has a temporary property, push this down to the Identifier 1263 table = expression.find(exp.Table) 1264 if table and isinstance(table.this, exp.Identifier): 1265 table.this.set("temporary", True) 1266 1267 return f"{self.seg('INTO')} {self.sql(expression, 'this')}"
1276 def version_sql(self, expression: exp.Version) -> str: 1277 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1278 this = f"FOR {name}" 1279 expr = expression.expression 1280 kind = expression.text("kind") 1281 if kind in ("FROM", "BETWEEN"): 1282 args = expr.expressions 1283 sep = "TO" if kind == "FROM" else "AND" 1284 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1285 else: 1286 expr_sql = self.sql(expr) 1287 1288 expr_sql = f" {expr_sql}" if expr_sql else "" 1289 return f"{this} {kind}{expr_sql}"
1308 def commit_sql(self, expression: exp.Commit) -> str: 1309 this = self.sql(expression, "this") 1310 this = f" {this}" if this else "" 1311 durability = expression.args.get("durability") 1312 durability = ( 1313 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1314 if durability is not None 1315 else "" 1316 ) 1317 return f"COMMIT TRANSACTION{this}{durability}"
1324 def identifier_sql(self, expression: exp.Identifier) -> str: 1325 identifier = super().identifier_sql(expression) 1326 1327 if expression.args.get("global"): 1328 identifier = f"##{identifier}" 1329 elif expression.args.get("temporary"): 1330 identifier = f"#{identifier}" 1331 1332 return identifier
1383 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1384 this = super().columndef_sql(expression, sep) 1385 default = self.sql(expression, "default") 1386 default = f" = {default}" if default else "" 1387 output = self.sql(expression, "output") 1388 output = f" {output}" if output else "" 1389 return f"{this}{default}{output}"
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- SUPPORTS_WINDOW_EXCLUDE
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- SUPPORTS_MEDIAN
- SUPPORTS_UNIX_SECONDS
- NORMALIZE_EXTRACT_DATE_PARTS
- ARRAY_SIZE_NAME
- ARRAY_SIZE_DIM_REQUIRED
- SUPPORTS_BETWEEN_FLAGS
- SUPPORTS_LIKE_QUANTIFIERS
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- sanitize_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- columnposition_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- limitoptions_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablefromrows_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- from_sql
- groupingsets_sql
- rollup_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- for_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- formatphrase_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterindex_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- addpartition_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- safedivide_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- like_sql
- ilike_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- jsoncast_sql
- try_sql
- log_sql
- use_sql
- binary
- ceil_floor
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- whens_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- uniquekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodatetime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- arrayconcat_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonextractquote_sql
- jsonexists_sql
- arrayagg_sql
- apply_sql
- grant_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql
- string_sql
- median_sql
- overflowtruncatebehavior_sql
- unixseconds_sql
- arraysize_sql
- attach_sql
- detach_sql
- attachoption_sql
- featuresattime_sql
- watermarkcolumnconstraint_sql
- encodeproperty_sql
- includeproperty_sql
- xmlelement_sql
- xmlkeyvalueoption_sql
- partitionbyrangeproperty_sql
- partitionbyrangepropertydynamic_sql
- unpivotcolumns_sql
- analyzesample_sql
- analyzestatistics_sql
- analyzehistogram_sql
- analyzedelete_sql
- analyzelistchainedrows_sql
- analyzevalidate_sql
- analyze_sql
- xmltable_sql
- xmlnamespace_sql
- export_sql
- declare_sql
- declareitem_sql
- recursivewithsearch_sql
- parameterizedagg_sql
- anonymousaggfunc_sql
- combinedaggfunc_sql
- combinedparameterizedagg_sql
- show_sql
- get_put_sql
- translatecharacters_sql
- decodecase_sql
- semanticview_sql
- getextract_sql