sqlglot.dialects.tsql
1from __future__ import annotations 2 3import datetime 4import re 5import typing as t 6from functools import reduce 7 8from sqlglot import exp, generator, parser, tokens, transforms 9from sqlglot.dialects.dialect import ( 10 Dialect, 11 NormalizationStrategy, 12 any_value_to_max_sql, 13 build_date_delta, 14 date_delta_sql, 15 datestrtodate_sql, 16 generatedasidentitycolumnconstraint_sql, 17 max_or_greatest, 18 min_or_least, 19 rename_func, 20 strposition_sql, 21 timestrtotime_sql, 22 trim_sql, 23) 24from sqlglot.helper import seq_get 25from sqlglot.parser import build_coalesce 26from sqlglot.time import format_time 27from sqlglot.tokens import TokenType 28 29if t.TYPE_CHECKING: 30 from sqlglot._typing import E 31 32FULL_FORMAT_TIME_MAPPING = { 33 "weekday": "%A", 34 "dw": "%A", 35 "w": "%A", 36 "month": "%B", 37 "mm": "%B", 38 "m": "%B", 39} 40 41DATE_DELTA_INTERVAL = { 42 "year": "year", 43 "yyyy": "year", 44 "yy": "year", 45 "quarter": "quarter", 46 "qq": "quarter", 47 "q": "quarter", 48 "month": "month", 49 "mm": "month", 50 "m": "month", 51 "week": "week", 52 "ww": "week", 53 "wk": "week", 54 "day": "day", 55 "dd": "day", 56 "d": "day", 57} 58 59 60DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})") 61 62# N = Numeric, C=Currency 63TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"} 64 65DEFAULT_START_DATE = datetime.date(1900, 1, 1) 66 67BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias} 68 69# Unsupported options: 70# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] ) 71# - TABLE HINT 72OPTIONS: parser.OPTIONS_TYPE = { 73 **dict.fromkeys( 74 ( 75 "DISABLE_OPTIMIZED_PLAN_FORCING", 76 "FAST", 77 "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX", 78 "LABEL", 79 "MAXDOP", 80 "MAXRECURSION", 81 "MAX_GRANT_PERCENT", 82 "MIN_GRANT_PERCENT", 83 "NO_PERFORMANCE_SPOOL", 84 "QUERYTRACEON", 85 "RECOMPILE", 86 ), 87 tuple(), 88 ), 89 "CONCAT": ("UNION",), 90 "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"), 91 "EXPAND": ("VIEWS",), 92 "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"), 93 "HASH": ("GROUP", "JOIN", "UNION"), 94 "KEEP": ("PLAN",), 95 "KEEPFIXED": ("PLAN",), 96 "LOOP": ("JOIN",), 97 "MERGE": ("JOIN", "UNION"), 98 "OPTIMIZE": (("FOR", "UNKNOWN"),), 99 "ORDER": ("GROUP",), 100 "PARAMETERIZATION": ("FORCED", "SIMPLE"), 101 "ROBUST": ("PLAN",), 102 "USE": ("PLAN",), 103} 104 105 106XML_OPTIONS: parser.OPTIONS_TYPE = { 107 **dict.fromkeys( 108 ( 109 "AUTO", 110 "EXPLICIT", 111 "TYPE", 112 ), 113 tuple(), 114 ), 115 "ELEMENTS": ( 116 "XSINIL", 117 "ABSENT", 118 ), 119 "BINARY": ("BASE64",), 120} 121 122 123OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL") 124 125 126def _build_formatted_time( 127 exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None 128) -> t.Callable[[t.List], E]: 129 def _builder(args: t.List) -> E: 130 fmt = seq_get(args, 0) 131 if isinstance(fmt, exp.Expression): 132 fmt = exp.Literal.string( 133 format_time( 134 fmt.name.lower(), 135 ( 136 {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING} 137 if full_format_mapping 138 else TSQL.TIME_MAPPING 139 ), 140 ) 141 ) 142 143 this = seq_get(args, 1) 144 if isinstance(this, exp.Expression): 145 this = exp.cast(this, exp.DataType.Type.DATETIME2) 146 147 return exp_class(this=this, format=fmt) 148 149 return _builder 150 151 152def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr: 153 this = seq_get(args, 0) 154 fmt = seq_get(args, 1) 155 culture = seq_get(args, 2) 156 157 number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name)) 158 159 if number_fmt: 160 return exp.NumberToStr(this=this, format=fmt, culture=culture) 161 162 if fmt: 163 fmt = exp.Literal.string( 164 format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING) 165 if len(fmt.name) == 1 166 else format_time(fmt.name, TSQL.TIME_MAPPING) 167 ) 168 169 return exp.TimeToStr(this=this, format=fmt, culture=culture) 170 171 172def _build_eomonth(args: t.List) -> exp.LastDay: 173 date = exp.TsOrDsToDate(this=seq_get(args, 0)) 174 month_lag = seq_get(args, 1) 175 176 if month_lag is None: 177 this: exp.Expression = date 178 else: 179 unit = DATE_DELTA_INTERVAL.get("month") 180 this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit)) 181 182 return exp.LastDay(this=this) 183 184 185def _build_hashbytes(args: t.List) -> exp.Expression: 186 kind, data = args 187 kind = kind.name.upper() if kind.is_string else "" 188 189 if kind == "MD5": 190 args.pop(0) 191 return exp.MD5(this=data) 192 if kind in ("SHA", "SHA1"): 193 args.pop(0) 194 return exp.SHA(this=data) 195 if kind == "SHA2_256": 196 return exp.SHA2(this=data, length=exp.Literal.number(256)) 197 if kind == "SHA2_512": 198 return exp.SHA2(this=data, length=exp.Literal.number(512)) 199 200 return exp.func("HASHBYTES", *args) 201 202 203DATEPART_ONLY_FORMATS = {"DW", "WK", "HOUR", "QUARTER"} 204 205 206def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str: 207 fmt = expression.args["format"] 208 209 if not isinstance(expression, exp.NumberToStr): 210 if fmt.is_string: 211 mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING) 212 213 name = (mapped_fmt or "").upper() 214 if name in DATEPART_ONLY_FORMATS: 215 return self.func("DATEPART", name, expression.this) 216 217 fmt_sql = self.sql(exp.Literal.string(mapped_fmt)) 218 else: 219 fmt_sql = self.format_time(expression) or self.sql(fmt) 220 else: 221 fmt_sql = self.sql(fmt) 222 223 return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture")) 224 225 226def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str: 227 this = expression.this 228 distinct = expression.find(exp.Distinct) 229 if distinct: 230 # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression 231 self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.") 232 this = distinct.pop().expressions[0] 233 234 order = "" 235 if isinstance(expression.this, exp.Order): 236 if expression.this.this: 237 this = expression.this.this.pop() 238 # Order has a leading space 239 order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})" 240 241 separator = expression.args.get("separator") or exp.Literal.string(",") 242 return f"STRING_AGG({self.format_args(this, separator)}){order}" 243 244 245def _build_date_delta( 246 exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None 247) -> t.Callable[[t.List], E]: 248 def _builder(args: t.List) -> E: 249 unit = seq_get(args, 0) 250 if unit and unit_mapping: 251 unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name)) 252 253 start_date = seq_get(args, 1) 254 if start_date and start_date.is_number: 255 # Numeric types are valid DATETIME values 256 if start_date.is_int: 257 adds = DEFAULT_START_DATE + datetime.timedelta(days=start_date.to_py()) 258 start_date = exp.Literal.string(adds.strftime("%F")) 259 else: 260 # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs. 261 # This is not a problem when generating T-SQL code, it is when transpiling to other dialects. 262 return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit) 263 264 return exp_class( 265 this=exp.TimeStrToTime(this=seq_get(args, 2)), 266 expression=exp.TimeStrToTime(this=start_date), 267 unit=unit, 268 ) 269 270 return _builder 271 272 273def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression: 274 """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.""" 275 alias = expression.args.get("alias") 276 277 if ( 278 isinstance(expression, (exp.CTE, exp.Subquery)) 279 and isinstance(alias, exp.TableAlias) 280 and not alias.columns 281 ): 282 from sqlglot.optimizer.qualify_columns import qualify_outputs 283 284 # We keep track of the unaliased column projection indexes instead of the expressions 285 # themselves, because the latter are going to be replaced by new nodes when the aliases 286 # are added and hence we won't be able to reach these newly added Alias parents 287 query = expression.this 288 unaliased_column_indexes = ( 289 i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias 290 ) 291 292 qualify_outputs(query) 293 294 # Preserve the quoting information of columns for newly added Alias nodes 295 query_selects = query.selects 296 for select_index in unaliased_column_indexes: 297 alias = query_selects[select_index] 298 column = alias.this 299 if isinstance(column.this, exp.Identifier): 300 alias.args["alias"].set("quoted", column.this.quoted) 301 302 return expression 303 304 305# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax 306def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts: 307 return exp.TimestampFromParts( 308 year=seq_get(args, 0), 309 month=seq_get(args, 1), 310 day=seq_get(args, 2), 311 hour=seq_get(args, 3), 312 min=seq_get(args, 4), 313 sec=seq_get(args, 5), 314 milli=seq_get(args, 6), 315 ) 316 317 318# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax 319def _build_timefromparts(args: t.List) -> exp.TimeFromParts: 320 return exp.TimeFromParts( 321 hour=seq_get(args, 0), 322 min=seq_get(args, 1), 323 sec=seq_get(args, 2), 324 fractions=seq_get(args, 3), 325 precision=seq_get(args, 4), 326 ) 327 328 329def _build_with_arg_as_text( 330 klass: t.Type[exp.Expression], 331) -> t.Callable[[t.List[exp.Expression]], exp.Expression]: 332 def _parse(args: t.List[exp.Expression]) -> exp.Expression: 333 this = seq_get(args, 0) 334 335 if this and not this.is_string: 336 this = exp.cast(this, exp.DataType.Type.TEXT) 337 338 expression = seq_get(args, 1) 339 kwargs = {"this": this} 340 341 if expression: 342 kwargs["expression"] = expression 343 344 return klass(**kwargs) 345 346 return _parse 347 348 349# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16 350def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous: 351 # PARSENAME(...) will be stored into exp.SplitPart if: 352 # - All args are literals 353 # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL) 354 if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args): 355 this = args[0] 356 part_index = args[1] 357 split_count = len(this.name.split(".")) 358 if split_count <= 4: 359 return exp.SplitPart( 360 this=this, 361 delimiter=exp.Literal.string("."), 362 part_index=exp.Literal.number(split_count + 1 - part_index.to_py()), 363 ) 364 365 return exp.Anonymous(this="PARSENAME", expressions=args) 366 367 368def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract: 369 if len(args) == 1: 370 # The default value for path is '$'. As a result, if you don't provide a 371 # value for path, JSON_QUERY returns the input expression. 372 args.append(exp.Literal.string("$")) 373 374 return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect) 375 376 377def _json_extract_sql( 378 self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar 379) -> str: 380 json_query = self.func("JSON_QUERY", expression.this, expression.expression) 381 json_value = self.func("JSON_VALUE", expression.this, expression.expression) 382 return self.func("ISNULL", json_query, json_value) 383 384 385def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime): 386 sql = timestrtotime_sql(self, expression) 387 if expression.args.get("zone"): 388 # If there is a timezone, produce an expression like: 389 # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC' 390 # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information 391 return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC"))) 392 return sql 393 394 395def _build_datetrunc(args: t.List) -> exp.TimestampTrunc: 396 unit = seq_get(args, 0) 397 this = seq_get(args, 1) 398 399 if this and this.is_string: 400 this = exp.cast(this, exp.DataType.Type.DATETIME2) 401 402 return exp.TimestampTrunc(this=this, unit=unit) 403 404 405class TSQL(Dialect): 406 SUPPORTS_SEMI_ANTI_JOIN = False 407 LOG_BASE_FIRST = False 408 TYPED_DIVISION = True 409 CONCAT_COALESCE = True 410 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 411 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 412 413 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 414 415 TIME_MAPPING = { 416 "year": "%Y", 417 "dayofyear": "%j", 418 "day": "%d", 419 "dy": "%d", 420 "y": "%Y", 421 "week": "%W", 422 "ww": "%W", 423 "wk": "%W", 424 "hour": "%h", 425 "hh": "%I", 426 "minute": "%M", 427 "mi": "%M", 428 "n": "%M", 429 "second": "%S", 430 "ss": "%S", 431 "s": "%-S", 432 "millisecond": "%f", 433 "ms": "%f", 434 "weekday": "%w", 435 "dw": "%w", 436 "month": "%m", 437 "mm": "%M", 438 "m": "%-M", 439 "Y": "%Y", 440 "YYYY": "%Y", 441 "YY": "%y", 442 "MMMM": "%B", 443 "MMM": "%b", 444 "MM": "%m", 445 "M": "%-m", 446 "dddd": "%A", 447 "dd": "%d", 448 "d": "%-d", 449 "HH": "%H", 450 "H": "%-H", 451 "h": "%-I", 452 "ffffff": "%f", 453 "yyyy": "%Y", 454 "yy": "%y", 455 } 456 457 CONVERT_FORMAT_MAPPING = { 458 "0": "%b %d %Y %-I:%M%p", 459 "1": "%m/%d/%y", 460 "2": "%y.%m.%d", 461 "3": "%d/%m/%y", 462 "4": "%d.%m.%y", 463 "5": "%d-%m-%y", 464 "6": "%d %b %y", 465 "7": "%b %d, %y", 466 "8": "%H:%M:%S", 467 "9": "%b %d %Y %-I:%M:%S:%f%p", 468 "10": "mm-dd-yy", 469 "11": "yy/mm/dd", 470 "12": "yymmdd", 471 "13": "%d %b %Y %H:%M:ss:%f", 472 "14": "%H:%M:%S:%f", 473 "20": "%Y-%m-%d %H:%M:%S", 474 "21": "%Y-%m-%d %H:%M:%S.%f", 475 "22": "%m/%d/%y %-I:%M:%S %p", 476 "23": "%Y-%m-%d", 477 "24": "%H:%M:%S", 478 "25": "%Y-%m-%d %H:%M:%S.%f", 479 "100": "%b %d %Y %-I:%M%p", 480 "101": "%m/%d/%Y", 481 "102": "%Y.%m.%d", 482 "103": "%d/%m/%Y", 483 "104": "%d.%m.%Y", 484 "105": "%d-%m-%Y", 485 "106": "%d %b %Y", 486 "107": "%b %d, %Y", 487 "108": "%H:%M:%S", 488 "109": "%b %d %Y %-I:%M:%S:%f%p", 489 "110": "%m-%d-%Y", 490 "111": "%Y/%m/%d", 491 "112": "%Y%m%d", 492 "113": "%d %b %Y %H:%M:%S:%f", 493 "114": "%H:%M:%S:%f", 494 "120": "%Y-%m-%d %H:%M:%S", 495 "121": "%Y-%m-%d %H:%M:%S.%f", 496 "126": "%Y-%m-%dT%H:%M:%S.%f", 497 } 498 499 FORMAT_TIME_MAPPING = { 500 "y": "%B %Y", 501 "d": "%m/%d/%Y", 502 "H": "%-H", 503 "h": "%-I", 504 "s": "%Y-%m-%d %H:%M:%S", 505 "D": "%A,%B,%Y", 506 "f": "%A,%B,%Y %-I:%M %p", 507 "F": "%A,%B,%Y %-I:%M:%S %p", 508 "g": "%m/%d/%Y %-I:%M %p", 509 "G": "%m/%d/%Y %-I:%M:%S %p", 510 "M": "%B %-d", 511 "m": "%B %-d", 512 "O": "%Y-%m-%dT%H:%M:%S", 513 "u": "%Y-%M-%D %H:%M:%S%z", 514 "U": "%A, %B %D, %Y %H:%M:%S%z", 515 "T": "%-I:%M:%S %p", 516 "t": "%-I:%M", 517 "Y": "%a %Y", 518 } 519 520 class Tokenizer(tokens.Tokenizer): 521 IDENTIFIERS = [("[", "]"), '"'] 522 QUOTES = ["'", '"'] 523 HEX_STRINGS = [("0x", ""), ("0X", "")] 524 VAR_SINGLE_TOKENS = {"@", "$", "#"} 525 526 KEYWORDS = { 527 **tokens.Tokenizer.KEYWORDS, 528 "CLUSTERED INDEX": TokenType.INDEX, 529 "DATETIME2": TokenType.DATETIME2, 530 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 531 "DECLARE": TokenType.DECLARE, 532 "EXEC": TokenType.COMMAND, 533 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 534 "GO": TokenType.COMMAND, 535 "IMAGE": TokenType.IMAGE, 536 "MONEY": TokenType.MONEY, 537 "NONCLUSTERED INDEX": TokenType.INDEX, 538 "NTEXT": TokenType.TEXT, 539 "OPTION": TokenType.OPTION, 540 "OUTPUT": TokenType.RETURNING, 541 "PRINT": TokenType.COMMAND, 542 "PROC": TokenType.PROCEDURE, 543 "REAL": TokenType.FLOAT, 544 "ROWVERSION": TokenType.ROWVERSION, 545 "SMALLDATETIME": TokenType.SMALLDATETIME, 546 "SMALLMONEY": TokenType.SMALLMONEY, 547 "SQL_VARIANT": TokenType.VARIANT, 548 "SYSTEM_USER": TokenType.CURRENT_USER, 549 "TOP": TokenType.TOP, 550 "TIMESTAMP": TokenType.ROWVERSION, 551 "TINYINT": TokenType.UTINYINT, 552 "UNIQUEIDENTIFIER": TokenType.UUID, 553 "UPDATE STATISTICS": TokenType.COMMAND, 554 "XML": TokenType.XML, 555 } 556 KEYWORDS.pop("/*+") 557 558 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 559 560 class Parser(parser.Parser): 561 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 562 LOG_DEFAULTS_TO_LN = True 563 STRING_ALIASES = True 564 NO_PAREN_IF_COMMANDS = False 565 566 QUERY_MODIFIER_PARSERS = { 567 **parser.Parser.QUERY_MODIFIER_PARSERS, 568 TokenType.OPTION: lambda self: ("options", self._parse_options()), 569 TokenType.FOR: lambda self: ("for", self._parse_for()), 570 } 571 572 # T-SQL does not allow BEGIN to be used as an identifier 573 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 574 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 575 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 576 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 577 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 578 579 FUNCTIONS = { 580 **parser.Parser.FUNCTIONS, 581 "CHARINDEX": lambda args: exp.StrPosition( 582 this=seq_get(args, 1), 583 substr=seq_get(args, 0), 584 position=seq_get(args, 2), 585 ), 586 "COUNT": lambda args: exp.Count( 587 this=seq_get(args, 0), expressions=args[1:], big_int=False 588 ), 589 "COUNT_BIG": lambda args: exp.Count( 590 this=seq_get(args, 0), expressions=args[1:], big_int=True 591 ), 592 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 593 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 594 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 595 "DATEPART": _build_formatted_time(exp.TimeToStr), 596 "DATETIMEFROMPARTS": _build_datetimefromparts, 597 "EOMONTH": _build_eomonth, 598 "FORMAT": _build_format, 599 "GETDATE": exp.CurrentTimestamp.from_arg_list, 600 "HASHBYTES": _build_hashbytes, 601 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 602 "JSON_QUERY": _build_json_query, 603 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 604 "LEN": _build_with_arg_as_text(exp.Length), 605 "LEFT": _build_with_arg_as_text(exp.Left), 606 "NEWID": exp.Uuid.from_arg_list, 607 "RIGHT": _build_with_arg_as_text(exp.Right), 608 "PARSENAME": _build_parsename, 609 "REPLICATE": exp.Repeat.from_arg_list, 610 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 611 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 612 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 613 "SUSER_NAME": exp.CurrentUser.from_arg_list, 614 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 615 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 616 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 617 "TIMEFROMPARTS": _build_timefromparts, 618 "DATETRUNC": _build_datetrunc, 619 } 620 621 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 622 623 PROCEDURE_OPTIONS = dict.fromkeys( 624 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 625 ) 626 627 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 628 629 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 630 TokenType.TABLE, 631 *parser.Parser.TYPE_TOKENS, 632 } 633 634 STATEMENT_PARSERS = { 635 **parser.Parser.STATEMENT_PARSERS, 636 TokenType.DECLARE: lambda self: self._parse_declare(), 637 } 638 639 RANGE_PARSERS = { 640 **parser.Parser.RANGE_PARSERS, 641 TokenType.DCOLON: lambda self, this: self.expression( 642 exp.ScopeResolution, 643 this=this, 644 expression=self._parse_function() or self._parse_var(any_token=True), 645 ), 646 } 647 648 NO_PAREN_FUNCTION_PARSERS = { 649 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 650 "NEXT": lambda self: self._parse_next_value_for(), 651 } 652 653 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 654 COLUMN_OPERATORS = { 655 **parser.Parser.COLUMN_OPERATORS, 656 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 657 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 658 else self.expression(exp.ScopeResolution, this=this, expression=to), 659 } 660 661 SET_OP_MODIFIERS = {"offset"} 662 663 def _parse_alter_table_set(self) -> exp.AlterSet: 664 return self._parse_wrapped(super()._parse_alter_table_set) 665 666 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 667 if self._match(TokenType.MERGE): 668 comments = self._prev_comments 669 merge = self._parse_merge() 670 merge.add_comments(comments, prepend=True) 671 return merge 672 673 return super()._parse_wrapped_select(table=table) 674 675 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 676 # We want to use _parse_types() if the first token after :: is a known type, 677 # otherwise we could parse something like x::varchar(max) into a function 678 if self._match_set(self.TYPE_TOKENS, advance=False): 679 return self._parse_types() 680 681 return self._parse_function() or self._parse_types() 682 683 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 684 if not self._match(TokenType.OPTION): 685 return None 686 687 def _parse_option() -> t.Optional[exp.Expression]: 688 option = self._parse_var_from_options(OPTIONS) 689 if not option: 690 return None 691 692 self._match(TokenType.EQ) 693 return self.expression( 694 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 695 ) 696 697 return self._parse_wrapped_csv(_parse_option) 698 699 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 700 this = self._parse_primary_or_var() 701 if self._match(TokenType.L_PAREN, advance=False): 702 expression = self._parse_wrapped(self._parse_string) 703 else: 704 expression = None 705 706 return exp.XMLKeyValueOption(this=this, expression=expression) 707 708 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 709 if not self._match_pair(TokenType.FOR, TokenType.XML): 710 return None 711 712 def _parse_for_xml() -> t.Optional[exp.Expression]: 713 return self.expression( 714 exp.QueryOption, 715 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 716 or self._parse_xml_key_value_option(), 717 ) 718 719 return self._parse_csv(_parse_for_xml) 720 721 def _parse_projections(self) -> t.List[exp.Expression]: 722 """ 723 T-SQL supports the syntax alias = expression in the SELECT's projection list, 724 so we transform all parsed Selects to convert their EQ projections into Aliases. 725 726 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 727 """ 728 return [ 729 ( 730 exp.alias_(projection.expression, projection.this.this, copy=False) 731 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 732 else projection 733 ) 734 for projection in super()._parse_projections() 735 ] 736 737 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 738 """Applies to SQL Server and Azure SQL Database 739 COMMIT [ { TRAN | TRANSACTION } 740 [ transaction_name | @tran_name_variable ] ] 741 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 742 743 ROLLBACK { TRAN | TRANSACTION } 744 [ transaction_name | @tran_name_variable 745 | savepoint_name | @savepoint_variable ] 746 """ 747 rollback = self._prev.token_type == TokenType.ROLLBACK 748 749 self._match_texts(("TRAN", "TRANSACTION")) 750 this = self._parse_id_var() 751 752 if rollback: 753 return self.expression(exp.Rollback, this=this) 754 755 durability = None 756 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 757 self._match_text_seq("DELAYED_DURABILITY") 758 self._match(TokenType.EQ) 759 760 if self._match_text_seq("OFF"): 761 durability = False 762 else: 763 self._match(TokenType.ON) 764 durability = True 765 766 self._match_r_paren() 767 768 return self.expression(exp.Commit, this=this, durability=durability) 769 770 def _parse_transaction(self) -> exp.Transaction | exp.Command: 771 """Applies to SQL Server and Azure SQL Database 772 BEGIN { TRAN | TRANSACTION } 773 [ { transaction_name | @tran_name_variable } 774 [ WITH MARK [ 'description' ] ] 775 ] 776 """ 777 if self._match_texts(("TRAN", "TRANSACTION")): 778 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 779 if self._match_text_seq("WITH", "MARK"): 780 transaction.set("mark", self._parse_string()) 781 782 return transaction 783 784 return self._parse_as_command(self._prev) 785 786 def _parse_returns(self) -> exp.ReturnsProperty: 787 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 788 returns = super()._parse_returns() 789 returns.set("table", table) 790 return returns 791 792 def _parse_convert( 793 self, strict: bool, safe: t.Optional[bool] = None 794 ) -> t.Optional[exp.Expression]: 795 this = self._parse_types() 796 self._match(TokenType.COMMA) 797 args = [this, *self._parse_csv(self._parse_assignment)] 798 convert = exp.Convert.from_arg_list(args) 799 convert.set("safe", safe) 800 convert.set("strict", strict) 801 return convert 802 803 def _parse_column_def( 804 self, this: t.Optional[exp.Expression], computed_column: bool = True 805 ) -> t.Optional[exp.Expression]: 806 this = super()._parse_column_def(this=this, computed_column=computed_column) 807 if not this: 808 return None 809 if self._match(TokenType.EQ): 810 this.set("default", self._parse_disjunction()) 811 if self._match_texts(self.COLUMN_DEFINITION_MODES): 812 this.set("output", self._prev.text) 813 return this 814 815 def _parse_user_defined_function( 816 self, kind: t.Optional[TokenType] = None 817 ) -> t.Optional[exp.Expression]: 818 this = super()._parse_user_defined_function(kind=kind) 819 820 if ( 821 kind == TokenType.FUNCTION 822 or isinstance(this, exp.UserDefinedFunction) 823 or self._match(TokenType.ALIAS, advance=False) 824 ): 825 return this 826 827 if not self._match(TokenType.WITH, advance=False): 828 expressions = self._parse_csv(self._parse_function_parameter) 829 else: 830 expressions = None 831 832 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 833 834 def _parse_into(self) -> t.Optional[exp.Into]: 835 into = super()._parse_into() 836 837 table = isinstance(into, exp.Into) and into.find(exp.Table) 838 if isinstance(table, exp.Table): 839 table_identifier = table.this 840 if table_identifier.args.get("temporary"): 841 # Promote the temporary property from the Identifier to the Into expression 842 t.cast(exp.Into, into).set("temporary", True) 843 844 return into 845 846 def _parse_id_var( 847 self, 848 any_token: bool = True, 849 tokens: t.Optional[t.Collection[TokenType]] = None, 850 ) -> t.Optional[exp.Expression]: 851 is_temporary = self._match(TokenType.HASH) 852 is_global = is_temporary and self._match(TokenType.HASH) 853 854 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 855 if this: 856 if is_global: 857 this.set("global", True) 858 elif is_temporary: 859 this.set("temporary", True) 860 861 return this 862 863 def _parse_create(self) -> exp.Create | exp.Command: 864 create = super()._parse_create() 865 866 if isinstance(create, exp.Create): 867 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 868 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 869 if not create.args.get("properties"): 870 create.set("properties", exp.Properties(expressions=[])) 871 872 create.args["properties"].append("expressions", exp.TemporaryProperty()) 873 874 return create 875 876 def _parse_if(self) -> t.Optional[exp.Expression]: 877 index = self._index 878 879 if self._match_text_seq("OBJECT_ID"): 880 self._parse_wrapped_csv(self._parse_string) 881 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 882 return self._parse_drop(exists=True) 883 self._retreat(index) 884 885 return super()._parse_if() 886 887 def _parse_unique(self) -> exp.UniqueColumnConstraint: 888 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 889 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 890 else: 891 this = self._parse_schema(self._parse_id_var(any_token=False)) 892 893 return self.expression(exp.UniqueColumnConstraint, this=this) 894 895 def _parse_partition(self) -> t.Optional[exp.Partition]: 896 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 897 return None 898 899 def parse_range(): 900 low = self._parse_bitwise() 901 high = self._parse_bitwise() if self._match_text_seq("TO") else None 902 903 return ( 904 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 905 ) 906 907 partition = self.expression( 908 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 909 ) 910 911 self._match_r_paren() 912 913 return partition 914 915 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 916 var = self._parse_id_var() 917 if not var: 918 return None 919 920 self._match(TokenType.ALIAS) 921 return self.expression( 922 exp.DeclareItem, 923 this=var, 924 kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(), 925 default=self._match(TokenType.EQ) and self._parse_bitwise(), 926 ) 927 928 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 929 expression = super()._parse_alter_table_alter() 930 931 if expression is not None: 932 collation = expression.args.get("collate") 933 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 934 identifier = collation.this 935 collation.set("this", exp.Var(this=identifier.name)) 936 937 return expression 938 939 class Generator(generator.Generator): 940 LIMIT_IS_TOP = True 941 QUERY_HINTS = False 942 RETURNING_END = False 943 NVL2_SUPPORTED = False 944 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 945 LIMIT_FETCH = "FETCH" 946 COMPUTED_COLUMN_WITH_TYPE = False 947 CTE_RECURSIVE_KEYWORD_REQUIRED = False 948 ENSURE_BOOLS = True 949 NULL_ORDERING_SUPPORTED = None 950 SUPPORTS_SINGLE_ARG_CONCAT = False 951 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 952 SUPPORTS_SELECT_INTO = True 953 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 954 SUPPORTS_TO_NUMBER = False 955 SET_OP_MODIFIERS = False 956 COPY_PARAMS_EQ_REQUIRED = True 957 PARSE_JSON_NAME = None 958 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 959 ALTER_SET_WRAPPED = True 960 ALTER_SET_TYPE = "" 961 962 EXPRESSIONS_WITHOUT_NESTED_CTES = { 963 exp.Create, 964 exp.Delete, 965 exp.Insert, 966 exp.Intersect, 967 exp.Except, 968 exp.Merge, 969 exp.Select, 970 exp.Subquery, 971 exp.Union, 972 exp.Update, 973 } 974 975 SUPPORTED_JSON_PATH_PARTS = { 976 exp.JSONPathKey, 977 exp.JSONPathRoot, 978 exp.JSONPathSubscript, 979 } 980 981 TYPE_MAPPING = { 982 **generator.Generator.TYPE_MAPPING, 983 exp.DataType.Type.BOOLEAN: "BIT", 984 exp.DataType.Type.DATETIME2: "DATETIME2", 985 exp.DataType.Type.DECIMAL: "NUMERIC", 986 exp.DataType.Type.DOUBLE: "FLOAT", 987 exp.DataType.Type.INT: "INTEGER", 988 exp.DataType.Type.ROWVERSION: "ROWVERSION", 989 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 990 exp.DataType.Type.TIMESTAMP: "DATETIME2", 991 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 992 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 993 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 994 exp.DataType.Type.UTINYINT: "TINYINT", 995 exp.DataType.Type.VARIANT: "SQL_VARIANT", 996 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 997 } 998 999 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 1000 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 1001 1002 TRANSFORMS = { 1003 **generator.Generator.TRANSFORMS, 1004 exp.AnyValue: any_value_to_max_sql, 1005 exp.ArrayToString: rename_func("STRING_AGG"), 1006 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 1007 exp.Chr: rename_func("CHAR"), 1008 exp.DateAdd: date_delta_sql("DATEADD"), 1009 exp.DateDiff: date_delta_sql("DATEDIFF"), 1010 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 1011 exp.CurrentDate: rename_func("GETDATE"), 1012 exp.CurrentTimestamp: rename_func("GETDATE"), 1013 exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"), 1014 exp.DateStrToDate: datestrtodate_sql, 1015 exp.Extract: rename_func("DATEPART"), 1016 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 1017 exp.GroupConcat: _string_agg_sql, 1018 exp.If: rename_func("IIF"), 1019 exp.JSONExtract: _json_extract_sql, 1020 exp.JSONExtractScalar: _json_extract_sql, 1021 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1022 exp.Ln: rename_func("LOG"), 1023 exp.Max: max_or_greatest, 1024 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 1025 exp.Min: min_or_least, 1026 exp.NumberToStr: _format_sql, 1027 exp.Repeat: rename_func("REPLICATE"), 1028 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 1029 exp.Select: transforms.preprocess( 1030 [ 1031 transforms.eliminate_distinct_on, 1032 transforms.eliminate_semi_and_anti_joins, 1033 transforms.eliminate_qualify, 1034 transforms.unnest_generate_date_array_using_recursive_cte, 1035 ] 1036 ), 1037 exp.Stddev: rename_func("STDEV"), 1038 exp.StrPosition: lambda self, e: strposition_sql( 1039 self, e, func_name="CHARINDEX", supports_position=True 1040 ), 1041 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 1042 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 1043 exp.SHA2: lambda self, e: self.func( 1044 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 1045 ), 1046 exp.TemporaryProperty: lambda self, e: "", 1047 exp.TimeStrToTime: _timestrtotime_sql, 1048 exp.TimeToStr: _format_sql, 1049 exp.Trim: trim_sql, 1050 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 1051 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 1052 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 1053 exp.Uuid: lambda *_: "NEWID()", 1054 exp.DateFromParts: rename_func("DATEFROMPARTS"), 1055 } 1056 1057 TRANSFORMS.pop(exp.ReturnsProperty) 1058 1059 PROPERTIES_LOCATION = { 1060 **generator.Generator.PROPERTIES_LOCATION, 1061 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 1062 } 1063 1064 def scope_resolution(self, rhs: str, scope_name: str) -> str: 1065 return f"{scope_name}::{rhs}" 1066 1067 def select_sql(self, expression: exp.Select) -> str: 1068 limit = expression.args.get("limit") 1069 offset = expression.args.get("offset") 1070 1071 if isinstance(limit, exp.Fetch) and not offset: 1072 # Dialects like Oracle can FETCH directly from a row set but 1073 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1074 offset = exp.Offset(expression=exp.Literal.number(0)) 1075 expression.set("offset", offset) 1076 1077 if offset: 1078 if not expression.args.get("order"): 1079 # ORDER BY is required in order to use OFFSET in a query, so we use 1080 # a noop order by, since we don't really care about the order. 1081 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1082 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1083 1084 if isinstance(limit, exp.Limit): 1085 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1086 # we replace here because otherwise TOP would be generated in select_sql 1087 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1088 1089 return super().select_sql(expression) 1090 1091 def convert_sql(self, expression: exp.Convert) -> str: 1092 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1093 return self.func( 1094 name, expression.this, expression.expression, expression.args.get("style") 1095 ) 1096 1097 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1098 option = self.sql(expression, "this") 1099 value = self.sql(expression, "expression") 1100 if value: 1101 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1102 return f"{option} {optional_equal_sign}{value}" 1103 return option 1104 1105 def lateral_op(self, expression: exp.Lateral) -> str: 1106 cross_apply = expression.args.get("cross_apply") 1107 if cross_apply is True: 1108 return "CROSS APPLY" 1109 if cross_apply is False: 1110 return "OUTER APPLY" 1111 1112 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1113 self.unsupported("LATERAL clause is not supported.") 1114 return "LATERAL" 1115 1116 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1117 this = expression.this 1118 split_count = len(this.name.split(".")) 1119 delimiter = expression.args.get("delimiter") 1120 part_index = expression.args.get("part_index") 1121 1122 if ( 1123 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1124 or (delimiter and delimiter.name != ".") 1125 or not part_index 1126 or split_count > 4 1127 ): 1128 self.unsupported( 1129 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1130 ) 1131 return "" 1132 1133 return self.func( 1134 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1135 ) 1136 1137 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1138 nano = expression.args.get("nano") 1139 if nano is not None: 1140 nano.pop() 1141 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1142 1143 if expression.args.get("fractions") is None: 1144 expression.set("fractions", exp.Literal.number(0)) 1145 if expression.args.get("precision") is None: 1146 expression.set("precision", exp.Literal.number(0)) 1147 1148 return rename_func("TIMEFROMPARTS")(self, expression) 1149 1150 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1151 zone = expression.args.get("zone") 1152 if zone is not None: 1153 zone.pop() 1154 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1155 1156 nano = expression.args.get("nano") 1157 if nano is not None: 1158 nano.pop() 1159 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1160 1161 if expression.args.get("milli") is None: 1162 expression.set("milli", exp.Literal.number(0)) 1163 1164 return rename_func("DATETIMEFROMPARTS")(self, expression) 1165 1166 def setitem_sql(self, expression: exp.SetItem) -> str: 1167 this = expression.this 1168 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1169 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1170 return f"{self.sql(this.left)} {self.sql(this.right)}" 1171 1172 return super().setitem_sql(expression) 1173 1174 def boolean_sql(self, expression: exp.Boolean) -> str: 1175 if type(expression.parent) in BIT_TYPES or isinstance( 1176 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1177 ): 1178 return "1" if expression.this else "0" 1179 1180 return "(1 = 1)" if expression.this else "(1 = 0)" 1181 1182 def is_sql(self, expression: exp.Is) -> str: 1183 if isinstance(expression.expression, exp.Boolean): 1184 return self.binary(expression, "=") 1185 return self.binary(expression, "IS") 1186 1187 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1188 sql = self.sql(expression, "this") 1189 properties = expression.args.get("properties") 1190 1191 if sql[:1] != "#" and any( 1192 isinstance(prop, exp.TemporaryProperty) 1193 for prop in (properties.expressions if properties else []) 1194 ): 1195 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1196 1197 return sql 1198 1199 def create_sql(self, expression: exp.Create) -> str: 1200 kind = expression.kind 1201 exists = expression.args.pop("exists", None) 1202 1203 like_property = expression.find(exp.LikeProperty) 1204 if like_property: 1205 ctas_expression = like_property.this 1206 else: 1207 ctas_expression = expression.expression 1208 1209 if kind == "VIEW": 1210 expression.this.set("catalog", None) 1211 with_ = expression.args.get("with") 1212 if ctas_expression and with_: 1213 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1214 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1215 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1216 ctas_expression.set("with", with_.pop()) 1217 1218 table = expression.find(exp.Table) 1219 1220 # Convert CTAS statement to SELECT .. INTO .. 1221 if kind == "TABLE" and ctas_expression: 1222 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1223 ctas_expression = ctas_expression.subquery() 1224 1225 properties = expression.args.get("properties") or exp.Properties() 1226 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1227 1228 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1229 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1230 1231 if like_property: 1232 select_into.limit(0, copy=False) 1233 1234 sql = self.sql(select_into) 1235 else: 1236 sql = super().create_sql(expression) 1237 1238 if exists: 1239 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1240 sql_with_ctes = self.prepend_ctes(expression, sql) 1241 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1242 if kind == "SCHEMA": 1243 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1244 elif kind == "TABLE": 1245 assert table 1246 where = exp.and_( 1247 exp.column("TABLE_NAME").eq(table.name), 1248 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1249 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1250 ) 1251 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1252 elif kind == "INDEX": 1253 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1254 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1255 elif expression.args.get("replace"): 1256 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1257 1258 return self.prepend_ctes(expression, sql) 1259 1260 @generator.unsupported_args("unlogged", "expressions") 1261 def into_sql(self, expression: exp.Into) -> str: 1262 if expression.args.get("temporary"): 1263 # If the Into expression has a temporary property, push this down to the Identifier 1264 table = expression.find(exp.Table) 1265 if table and isinstance(table.this, exp.Identifier): 1266 table.this.set("temporary", True) 1267 1268 return f"{self.seg('INTO')} {self.sql(expression, 'this')}" 1269 1270 def count_sql(self, expression: exp.Count) -> str: 1271 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1272 return rename_func(func_name)(self, expression) 1273 1274 def offset_sql(self, expression: exp.Offset) -> str: 1275 return f"{super().offset_sql(expression)} ROWS" 1276 1277 def version_sql(self, expression: exp.Version) -> str: 1278 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1279 this = f"FOR {name}" 1280 expr = expression.expression 1281 kind = expression.text("kind") 1282 if kind in ("FROM", "BETWEEN"): 1283 args = expr.expressions 1284 sep = "TO" if kind == "FROM" else "AND" 1285 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1286 else: 1287 expr_sql = self.sql(expr) 1288 1289 expr_sql = f" {expr_sql}" if expr_sql else "" 1290 return f"{this} {kind}{expr_sql}" 1291 1292 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1293 table = expression.args.get("table") 1294 table = f"{table} " if table else "" 1295 return f"RETURNS {table}{self.sql(expression, 'this')}" 1296 1297 def returning_sql(self, expression: exp.Returning) -> str: 1298 into = self.sql(expression, "into") 1299 into = self.seg(f"INTO {into}") if into else "" 1300 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1301 1302 def transaction_sql(self, expression: exp.Transaction) -> str: 1303 this = self.sql(expression, "this") 1304 this = f" {this}" if this else "" 1305 mark = self.sql(expression, "mark") 1306 mark = f" WITH MARK {mark}" if mark else "" 1307 return f"BEGIN TRANSACTION{this}{mark}" 1308 1309 def commit_sql(self, expression: exp.Commit) -> str: 1310 this = self.sql(expression, "this") 1311 this = f" {this}" if this else "" 1312 durability = expression.args.get("durability") 1313 durability = ( 1314 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1315 if durability is not None 1316 else "" 1317 ) 1318 return f"COMMIT TRANSACTION{this}{durability}" 1319 1320 def rollback_sql(self, expression: exp.Rollback) -> str: 1321 this = self.sql(expression, "this") 1322 this = f" {this}" if this else "" 1323 return f"ROLLBACK TRANSACTION{this}" 1324 1325 def identifier_sql(self, expression: exp.Identifier) -> str: 1326 identifier = super().identifier_sql(expression) 1327 1328 if expression.args.get("global"): 1329 identifier = f"##{identifier}" 1330 elif expression.args.get("temporary"): 1331 identifier = f"#{identifier}" 1332 1333 return identifier 1334 1335 def constraint_sql(self, expression: exp.Constraint) -> str: 1336 this = self.sql(expression, "this") 1337 expressions = self.expressions(expression, flat=True, sep=" ") 1338 return f"CONSTRAINT {this} {expressions}" 1339 1340 def length_sql(self, expression: exp.Length) -> str: 1341 return self._uncast_text(expression, "LEN") 1342 1343 def right_sql(self, expression: exp.Right) -> str: 1344 return self._uncast_text(expression, "RIGHT") 1345 1346 def left_sql(self, expression: exp.Left) -> str: 1347 return self._uncast_text(expression, "LEFT") 1348 1349 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1350 this = expression.this 1351 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1352 this_sql = self.sql(this, "this") 1353 else: 1354 this_sql = self.sql(this) 1355 expression_sql = self.sql(expression, "expression") 1356 return self.func(name, this_sql, expression_sql if expression_sql else None) 1357 1358 def partition_sql(self, expression: exp.Partition) -> str: 1359 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1360 1361 def alter_sql(self, expression: exp.Alter) -> str: 1362 action = seq_get(expression.args.get("actions") or [], 0) 1363 if isinstance(action, exp.AlterRename): 1364 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1365 return super().alter_sql(expression) 1366 1367 def drop_sql(self, expression: exp.Drop) -> str: 1368 if expression.args["kind"] == "VIEW": 1369 expression.this.set("catalog", None) 1370 return super().drop_sql(expression) 1371 1372 def options_modifier(self, expression: exp.Expression) -> str: 1373 options = self.expressions(expression, key="options") 1374 return f" OPTION{self.wrap(options)}" if options else "" 1375 1376 def dpipe_sql(self, expression: exp.DPipe) -> str: 1377 return self.sql( 1378 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1379 ) 1380 1381 def isascii_sql(self, expression: exp.IsAscii) -> str: 1382 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1383 1384 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1385 this = super().columndef_sql(expression, sep) 1386 default = self.sql(expression, "default") 1387 default = f" = {default}" if default else "" 1388 output = self.sql(expression, "output") 1389 output = f" {output}" if output else "" 1390 return f"{this}{default}{output}" 1391 1392 def coalesce_sql(self, expression: exp.Coalesce) -> str: 1393 func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE" 1394 return rename_func(func_name)(self, expression)
274def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression: 275 """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.""" 276 alias = expression.args.get("alias") 277 278 if ( 279 isinstance(expression, (exp.CTE, exp.Subquery)) 280 and isinstance(alias, exp.TableAlias) 281 and not alias.columns 282 ): 283 from sqlglot.optimizer.qualify_columns import qualify_outputs 284 285 # We keep track of the unaliased column projection indexes instead of the expressions 286 # themselves, because the latter are going to be replaced by new nodes when the aliases 287 # are added and hence we won't be able to reach these newly added Alias parents 288 query = expression.this 289 unaliased_column_indexes = ( 290 i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias 291 ) 292 293 qualify_outputs(query) 294 295 # Preserve the quoting information of columns for newly added Alias nodes 296 query_selects = query.selects 297 for select_index in unaliased_column_indexes: 298 alias = query_selects[select_index] 299 column = alias.this 300 if isinstance(column.this, exp.Identifier): 301 alias.args["alias"].set("quoted", column.this.quoted) 302 303 return expression
Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
406class TSQL(Dialect): 407 SUPPORTS_SEMI_ANTI_JOIN = False 408 LOG_BASE_FIRST = False 409 TYPED_DIVISION = True 410 CONCAT_COALESCE = True 411 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 412 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 413 414 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 415 416 TIME_MAPPING = { 417 "year": "%Y", 418 "dayofyear": "%j", 419 "day": "%d", 420 "dy": "%d", 421 "y": "%Y", 422 "week": "%W", 423 "ww": "%W", 424 "wk": "%W", 425 "hour": "%h", 426 "hh": "%I", 427 "minute": "%M", 428 "mi": "%M", 429 "n": "%M", 430 "second": "%S", 431 "ss": "%S", 432 "s": "%-S", 433 "millisecond": "%f", 434 "ms": "%f", 435 "weekday": "%w", 436 "dw": "%w", 437 "month": "%m", 438 "mm": "%M", 439 "m": "%-M", 440 "Y": "%Y", 441 "YYYY": "%Y", 442 "YY": "%y", 443 "MMMM": "%B", 444 "MMM": "%b", 445 "MM": "%m", 446 "M": "%-m", 447 "dddd": "%A", 448 "dd": "%d", 449 "d": "%-d", 450 "HH": "%H", 451 "H": "%-H", 452 "h": "%-I", 453 "ffffff": "%f", 454 "yyyy": "%Y", 455 "yy": "%y", 456 } 457 458 CONVERT_FORMAT_MAPPING = { 459 "0": "%b %d %Y %-I:%M%p", 460 "1": "%m/%d/%y", 461 "2": "%y.%m.%d", 462 "3": "%d/%m/%y", 463 "4": "%d.%m.%y", 464 "5": "%d-%m-%y", 465 "6": "%d %b %y", 466 "7": "%b %d, %y", 467 "8": "%H:%M:%S", 468 "9": "%b %d %Y %-I:%M:%S:%f%p", 469 "10": "mm-dd-yy", 470 "11": "yy/mm/dd", 471 "12": "yymmdd", 472 "13": "%d %b %Y %H:%M:ss:%f", 473 "14": "%H:%M:%S:%f", 474 "20": "%Y-%m-%d %H:%M:%S", 475 "21": "%Y-%m-%d %H:%M:%S.%f", 476 "22": "%m/%d/%y %-I:%M:%S %p", 477 "23": "%Y-%m-%d", 478 "24": "%H:%M:%S", 479 "25": "%Y-%m-%d %H:%M:%S.%f", 480 "100": "%b %d %Y %-I:%M%p", 481 "101": "%m/%d/%Y", 482 "102": "%Y.%m.%d", 483 "103": "%d/%m/%Y", 484 "104": "%d.%m.%Y", 485 "105": "%d-%m-%Y", 486 "106": "%d %b %Y", 487 "107": "%b %d, %Y", 488 "108": "%H:%M:%S", 489 "109": "%b %d %Y %-I:%M:%S:%f%p", 490 "110": "%m-%d-%Y", 491 "111": "%Y/%m/%d", 492 "112": "%Y%m%d", 493 "113": "%d %b %Y %H:%M:%S:%f", 494 "114": "%H:%M:%S:%f", 495 "120": "%Y-%m-%d %H:%M:%S", 496 "121": "%Y-%m-%d %H:%M:%S.%f", 497 "126": "%Y-%m-%dT%H:%M:%S.%f", 498 } 499 500 FORMAT_TIME_MAPPING = { 501 "y": "%B %Y", 502 "d": "%m/%d/%Y", 503 "H": "%-H", 504 "h": "%-I", 505 "s": "%Y-%m-%d %H:%M:%S", 506 "D": "%A,%B,%Y", 507 "f": "%A,%B,%Y %-I:%M %p", 508 "F": "%A,%B,%Y %-I:%M:%S %p", 509 "g": "%m/%d/%Y %-I:%M %p", 510 "G": "%m/%d/%Y %-I:%M:%S %p", 511 "M": "%B %-d", 512 "m": "%B %-d", 513 "O": "%Y-%m-%dT%H:%M:%S", 514 "u": "%Y-%M-%D %H:%M:%S%z", 515 "U": "%A, %B %D, %Y %H:%M:%S%z", 516 "T": "%-I:%M:%S %p", 517 "t": "%-I:%M", 518 "Y": "%a %Y", 519 } 520 521 class Tokenizer(tokens.Tokenizer): 522 IDENTIFIERS = [("[", "]"), '"'] 523 QUOTES = ["'", '"'] 524 HEX_STRINGS = [("0x", ""), ("0X", "")] 525 VAR_SINGLE_TOKENS = {"@", "$", "#"} 526 527 KEYWORDS = { 528 **tokens.Tokenizer.KEYWORDS, 529 "CLUSTERED INDEX": TokenType.INDEX, 530 "DATETIME2": TokenType.DATETIME2, 531 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 532 "DECLARE": TokenType.DECLARE, 533 "EXEC": TokenType.COMMAND, 534 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 535 "GO": TokenType.COMMAND, 536 "IMAGE": TokenType.IMAGE, 537 "MONEY": TokenType.MONEY, 538 "NONCLUSTERED INDEX": TokenType.INDEX, 539 "NTEXT": TokenType.TEXT, 540 "OPTION": TokenType.OPTION, 541 "OUTPUT": TokenType.RETURNING, 542 "PRINT": TokenType.COMMAND, 543 "PROC": TokenType.PROCEDURE, 544 "REAL": TokenType.FLOAT, 545 "ROWVERSION": TokenType.ROWVERSION, 546 "SMALLDATETIME": TokenType.SMALLDATETIME, 547 "SMALLMONEY": TokenType.SMALLMONEY, 548 "SQL_VARIANT": TokenType.VARIANT, 549 "SYSTEM_USER": TokenType.CURRENT_USER, 550 "TOP": TokenType.TOP, 551 "TIMESTAMP": TokenType.ROWVERSION, 552 "TINYINT": TokenType.UTINYINT, 553 "UNIQUEIDENTIFIER": TokenType.UUID, 554 "UPDATE STATISTICS": TokenType.COMMAND, 555 "XML": TokenType.XML, 556 } 557 KEYWORDS.pop("/*+") 558 559 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 560 561 class Parser(parser.Parser): 562 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 563 LOG_DEFAULTS_TO_LN = True 564 STRING_ALIASES = True 565 NO_PAREN_IF_COMMANDS = False 566 567 QUERY_MODIFIER_PARSERS = { 568 **parser.Parser.QUERY_MODIFIER_PARSERS, 569 TokenType.OPTION: lambda self: ("options", self._parse_options()), 570 TokenType.FOR: lambda self: ("for", self._parse_for()), 571 } 572 573 # T-SQL does not allow BEGIN to be used as an identifier 574 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 575 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 576 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 577 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 578 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 579 580 FUNCTIONS = { 581 **parser.Parser.FUNCTIONS, 582 "CHARINDEX": lambda args: exp.StrPosition( 583 this=seq_get(args, 1), 584 substr=seq_get(args, 0), 585 position=seq_get(args, 2), 586 ), 587 "COUNT": lambda args: exp.Count( 588 this=seq_get(args, 0), expressions=args[1:], big_int=False 589 ), 590 "COUNT_BIG": lambda args: exp.Count( 591 this=seq_get(args, 0), expressions=args[1:], big_int=True 592 ), 593 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 594 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 595 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 596 "DATEPART": _build_formatted_time(exp.TimeToStr), 597 "DATETIMEFROMPARTS": _build_datetimefromparts, 598 "EOMONTH": _build_eomonth, 599 "FORMAT": _build_format, 600 "GETDATE": exp.CurrentTimestamp.from_arg_list, 601 "HASHBYTES": _build_hashbytes, 602 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 603 "JSON_QUERY": _build_json_query, 604 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 605 "LEN": _build_with_arg_as_text(exp.Length), 606 "LEFT": _build_with_arg_as_text(exp.Left), 607 "NEWID": exp.Uuid.from_arg_list, 608 "RIGHT": _build_with_arg_as_text(exp.Right), 609 "PARSENAME": _build_parsename, 610 "REPLICATE": exp.Repeat.from_arg_list, 611 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 612 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 613 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 614 "SUSER_NAME": exp.CurrentUser.from_arg_list, 615 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 616 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 617 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 618 "TIMEFROMPARTS": _build_timefromparts, 619 "DATETRUNC": _build_datetrunc, 620 } 621 622 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 623 624 PROCEDURE_OPTIONS = dict.fromkeys( 625 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 626 ) 627 628 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 629 630 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 631 TokenType.TABLE, 632 *parser.Parser.TYPE_TOKENS, 633 } 634 635 STATEMENT_PARSERS = { 636 **parser.Parser.STATEMENT_PARSERS, 637 TokenType.DECLARE: lambda self: self._parse_declare(), 638 } 639 640 RANGE_PARSERS = { 641 **parser.Parser.RANGE_PARSERS, 642 TokenType.DCOLON: lambda self, this: self.expression( 643 exp.ScopeResolution, 644 this=this, 645 expression=self._parse_function() or self._parse_var(any_token=True), 646 ), 647 } 648 649 NO_PAREN_FUNCTION_PARSERS = { 650 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 651 "NEXT": lambda self: self._parse_next_value_for(), 652 } 653 654 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 655 COLUMN_OPERATORS = { 656 **parser.Parser.COLUMN_OPERATORS, 657 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 658 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 659 else self.expression(exp.ScopeResolution, this=this, expression=to), 660 } 661 662 SET_OP_MODIFIERS = {"offset"} 663 664 def _parse_alter_table_set(self) -> exp.AlterSet: 665 return self._parse_wrapped(super()._parse_alter_table_set) 666 667 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 668 if self._match(TokenType.MERGE): 669 comments = self._prev_comments 670 merge = self._parse_merge() 671 merge.add_comments(comments, prepend=True) 672 return merge 673 674 return super()._parse_wrapped_select(table=table) 675 676 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 677 # We want to use _parse_types() if the first token after :: is a known type, 678 # otherwise we could parse something like x::varchar(max) into a function 679 if self._match_set(self.TYPE_TOKENS, advance=False): 680 return self._parse_types() 681 682 return self._parse_function() or self._parse_types() 683 684 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 685 if not self._match(TokenType.OPTION): 686 return None 687 688 def _parse_option() -> t.Optional[exp.Expression]: 689 option = self._parse_var_from_options(OPTIONS) 690 if not option: 691 return None 692 693 self._match(TokenType.EQ) 694 return self.expression( 695 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 696 ) 697 698 return self._parse_wrapped_csv(_parse_option) 699 700 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 701 this = self._parse_primary_or_var() 702 if self._match(TokenType.L_PAREN, advance=False): 703 expression = self._parse_wrapped(self._parse_string) 704 else: 705 expression = None 706 707 return exp.XMLKeyValueOption(this=this, expression=expression) 708 709 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 710 if not self._match_pair(TokenType.FOR, TokenType.XML): 711 return None 712 713 def _parse_for_xml() -> t.Optional[exp.Expression]: 714 return self.expression( 715 exp.QueryOption, 716 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 717 or self._parse_xml_key_value_option(), 718 ) 719 720 return self._parse_csv(_parse_for_xml) 721 722 def _parse_projections(self) -> t.List[exp.Expression]: 723 """ 724 T-SQL supports the syntax alias = expression in the SELECT's projection list, 725 so we transform all parsed Selects to convert their EQ projections into Aliases. 726 727 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 728 """ 729 return [ 730 ( 731 exp.alias_(projection.expression, projection.this.this, copy=False) 732 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 733 else projection 734 ) 735 for projection in super()._parse_projections() 736 ] 737 738 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 739 """Applies to SQL Server and Azure SQL Database 740 COMMIT [ { TRAN | TRANSACTION } 741 [ transaction_name | @tran_name_variable ] ] 742 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 743 744 ROLLBACK { TRAN | TRANSACTION } 745 [ transaction_name | @tran_name_variable 746 | savepoint_name | @savepoint_variable ] 747 """ 748 rollback = self._prev.token_type == TokenType.ROLLBACK 749 750 self._match_texts(("TRAN", "TRANSACTION")) 751 this = self._parse_id_var() 752 753 if rollback: 754 return self.expression(exp.Rollback, this=this) 755 756 durability = None 757 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 758 self._match_text_seq("DELAYED_DURABILITY") 759 self._match(TokenType.EQ) 760 761 if self._match_text_seq("OFF"): 762 durability = False 763 else: 764 self._match(TokenType.ON) 765 durability = True 766 767 self._match_r_paren() 768 769 return self.expression(exp.Commit, this=this, durability=durability) 770 771 def _parse_transaction(self) -> exp.Transaction | exp.Command: 772 """Applies to SQL Server and Azure SQL Database 773 BEGIN { TRAN | TRANSACTION } 774 [ { transaction_name | @tran_name_variable } 775 [ WITH MARK [ 'description' ] ] 776 ] 777 """ 778 if self._match_texts(("TRAN", "TRANSACTION")): 779 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 780 if self._match_text_seq("WITH", "MARK"): 781 transaction.set("mark", self._parse_string()) 782 783 return transaction 784 785 return self._parse_as_command(self._prev) 786 787 def _parse_returns(self) -> exp.ReturnsProperty: 788 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 789 returns = super()._parse_returns() 790 returns.set("table", table) 791 return returns 792 793 def _parse_convert( 794 self, strict: bool, safe: t.Optional[bool] = None 795 ) -> t.Optional[exp.Expression]: 796 this = self._parse_types() 797 self._match(TokenType.COMMA) 798 args = [this, *self._parse_csv(self._parse_assignment)] 799 convert = exp.Convert.from_arg_list(args) 800 convert.set("safe", safe) 801 convert.set("strict", strict) 802 return convert 803 804 def _parse_column_def( 805 self, this: t.Optional[exp.Expression], computed_column: bool = True 806 ) -> t.Optional[exp.Expression]: 807 this = super()._parse_column_def(this=this, computed_column=computed_column) 808 if not this: 809 return None 810 if self._match(TokenType.EQ): 811 this.set("default", self._parse_disjunction()) 812 if self._match_texts(self.COLUMN_DEFINITION_MODES): 813 this.set("output", self._prev.text) 814 return this 815 816 def _parse_user_defined_function( 817 self, kind: t.Optional[TokenType] = None 818 ) -> t.Optional[exp.Expression]: 819 this = super()._parse_user_defined_function(kind=kind) 820 821 if ( 822 kind == TokenType.FUNCTION 823 or isinstance(this, exp.UserDefinedFunction) 824 or self._match(TokenType.ALIAS, advance=False) 825 ): 826 return this 827 828 if not self._match(TokenType.WITH, advance=False): 829 expressions = self._parse_csv(self._parse_function_parameter) 830 else: 831 expressions = None 832 833 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 834 835 def _parse_into(self) -> t.Optional[exp.Into]: 836 into = super()._parse_into() 837 838 table = isinstance(into, exp.Into) and into.find(exp.Table) 839 if isinstance(table, exp.Table): 840 table_identifier = table.this 841 if table_identifier.args.get("temporary"): 842 # Promote the temporary property from the Identifier to the Into expression 843 t.cast(exp.Into, into).set("temporary", True) 844 845 return into 846 847 def _parse_id_var( 848 self, 849 any_token: bool = True, 850 tokens: t.Optional[t.Collection[TokenType]] = None, 851 ) -> t.Optional[exp.Expression]: 852 is_temporary = self._match(TokenType.HASH) 853 is_global = is_temporary and self._match(TokenType.HASH) 854 855 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 856 if this: 857 if is_global: 858 this.set("global", True) 859 elif is_temporary: 860 this.set("temporary", True) 861 862 return this 863 864 def _parse_create(self) -> exp.Create | exp.Command: 865 create = super()._parse_create() 866 867 if isinstance(create, exp.Create): 868 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 869 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 870 if not create.args.get("properties"): 871 create.set("properties", exp.Properties(expressions=[])) 872 873 create.args["properties"].append("expressions", exp.TemporaryProperty()) 874 875 return create 876 877 def _parse_if(self) -> t.Optional[exp.Expression]: 878 index = self._index 879 880 if self._match_text_seq("OBJECT_ID"): 881 self._parse_wrapped_csv(self._parse_string) 882 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 883 return self._parse_drop(exists=True) 884 self._retreat(index) 885 886 return super()._parse_if() 887 888 def _parse_unique(self) -> exp.UniqueColumnConstraint: 889 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 890 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 891 else: 892 this = self._parse_schema(self._parse_id_var(any_token=False)) 893 894 return self.expression(exp.UniqueColumnConstraint, this=this) 895 896 def _parse_partition(self) -> t.Optional[exp.Partition]: 897 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 898 return None 899 900 def parse_range(): 901 low = self._parse_bitwise() 902 high = self._parse_bitwise() if self._match_text_seq("TO") else None 903 904 return ( 905 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 906 ) 907 908 partition = self.expression( 909 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 910 ) 911 912 self._match_r_paren() 913 914 return partition 915 916 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 917 var = self._parse_id_var() 918 if not var: 919 return None 920 921 self._match(TokenType.ALIAS) 922 return self.expression( 923 exp.DeclareItem, 924 this=var, 925 kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(), 926 default=self._match(TokenType.EQ) and self._parse_bitwise(), 927 ) 928 929 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 930 expression = super()._parse_alter_table_alter() 931 932 if expression is not None: 933 collation = expression.args.get("collate") 934 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 935 identifier = collation.this 936 collation.set("this", exp.Var(this=identifier.name)) 937 938 return expression 939 940 class Generator(generator.Generator): 941 LIMIT_IS_TOP = True 942 QUERY_HINTS = False 943 RETURNING_END = False 944 NVL2_SUPPORTED = False 945 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 946 LIMIT_FETCH = "FETCH" 947 COMPUTED_COLUMN_WITH_TYPE = False 948 CTE_RECURSIVE_KEYWORD_REQUIRED = False 949 ENSURE_BOOLS = True 950 NULL_ORDERING_SUPPORTED = None 951 SUPPORTS_SINGLE_ARG_CONCAT = False 952 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 953 SUPPORTS_SELECT_INTO = True 954 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 955 SUPPORTS_TO_NUMBER = False 956 SET_OP_MODIFIERS = False 957 COPY_PARAMS_EQ_REQUIRED = True 958 PARSE_JSON_NAME = None 959 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 960 ALTER_SET_WRAPPED = True 961 ALTER_SET_TYPE = "" 962 963 EXPRESSIONS_WITHOUT_NESTED_CTES = { 964 exp.Create, 965 exp.Delete, 966 exp.Insert, 967 exp.Intersect, 968 exp.Except, 969 exp.Merge, 970 exp.Select, 971 exp.Subquery, 972 exp.Union, 973 exp.Update, 974 } 975 976 SUPPORTED_JSON_PATH_PARTS = { 977 exp.JSONPathKey, 978 exp.JSONPathRoot, 979 exp.JSONPathSubscript, 980 } 981 982 TYPE_MAPPING = { 983 **generator.Generator.TYPE_MAPPING, 984 exp.DataType.Type.BOOLEAN: "BIT", 985 exp.DataType.Type.DATETIME2: "DATETIME2", 986 exp.DataType.Type.DECIMAL: "NUMERIC", 987 exp.DataType.Type.DOUBLE: "FLOAT", 988 exp.DataType.Type.INT: "INTEGER", 989 exp.DataType.Type.ROWVERSION: "ROWVERSION", 990 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 991 exp.DataType.Type.TIMESTAMP: "DATETIME2", 992 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 993 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 994 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 995 exp.DataType.Type.UTINYINT: "TINYINT", 996 exp.DataType.Type.VARIANT: "SQL_VARIANT", 997 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 998 } 999 1000 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 1001 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 1002 1003 TRANSFORMS = { 1004 **generator.Generator.TRANSFORMS, 1005 exp.AnyValue: any_value_to_max_sql, 1006 exp.ArrayToString: rename_func("STRING_AGG"), 1007 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 1008 exp.Chr: rename_func("CHAR"), 1009 exp.DateAdd: date_delta_sql("DATEADD"), 1010 exp.DateDiff: date_delta_sql("DATEDIFF"), 1011 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 1012 exp.CurrentDate: rename_func("GETDATE"), 1013 exp.CurrentTimestamp: rename_func("GETDATE"), 1014 exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"), 1015 exp.DateStrToDate: datestrtodate_sql, 1016 exp.Extract: rename_func("DATEPART"), 1017 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 1018 exp.GroupConcat: _string_agg_sql, 1019 exp.If: rename_func("IIF"), 1020 exp.JSONExtract: _json_extract_sql, 1021 exp.JSONExtractScalar: _json_extract_sql, 1022 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1023 exp.Ln: rename_func("LOG"), 1024 exp.Max: max_or_greatest, 1025 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 1026 exp.Min: min_or_least, 1027 exp.NumberToStr: _format_sql, 1028 exp.Repeat: rename_func("REPLICATE"), 1029 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 1030 exp.Select: transforms.preprocess( 1031 [ 1032 transforms.eliminate_distinct_on, 1033 transforms.eliminate_semi_and_anti_joins, 1034 transforms.eliminate_qualify, 1035 transforms.unnest_generate_date_array_using_recursive_cte, 1036 ] 1037 ), 1038 exp.Stddev: rename_func("STDEV"), 1039 exp.StrPosition: lambda self, e: strposition_sql( 1040 self, e, func_name="CHARINDEX", supports_position=True 1041 ), 1042 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 1043 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 1044 exp.SHA2: lambda self, e: self.func( 1045 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 1046 ), 1047 exp.TemporaryProperty: lambda self, e: "", 1048 exp.TimeStrToTime: _timestrtotime_sql, 1049 exp.TimeToStr: _format_sql, 1050 exp.Trim: trim_sql, 1051 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 1052 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 1053 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 1054 exp.Uuid: lambda *_: "NEWID()", 1055 exp.DateFromParts: rename_func("DATEFROMPARTS"), 1056 } 1057 1058 TRANSFORMS.pop(exp.ReturnsProperty) 1059 1060 PROPERTIES_LOCATION = { 1061 **generator.Generator.PROPERTIES_LOCATION, 1062 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 1063 } 1064 1065 def scope_resolution(self, rhs: str, scope_name: str) -> str: 1066 return f"{scope_name}::{rhs}" 1067 1068 def select_sql(self, expression: exp.Select) -> str: 1069 limit = expression.args.get("limit") 1070 offset = expression.args.get("offset") 1071 1072 if isinstance(limit, exp.Fetch) and not offset: 1073 # Dialects like Oracle can FETCH directly from a row set but 1074 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1075 offset = exp.Offset(expression=exp.Literal.number(0)) 1076 expression.set("offset", offset) 1077 1078 if offset: 1079 if not expression.args.get("order"): 1080 # ORDER BY is required in order to use OFFSET in a query, so we use 1081 # a noop order by, since we don't really care about the order. 1082 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1083 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1084 1085 if isinstance(limit, exp.Limit): 1086 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1087 # we replace here because otherwise TOP would be generated in select_sql 1088 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1089 1090 return super().select_sql(expression) 1091 1092 def convert_sql(self, expression: exp.Convert) -> str: 1093 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1094 return self.func( 1095 name, expression.this, expression.expression, expression.args.get("style") 1096 ) 1097 1098 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1099 option = self.sql(expression, "this") 1100 value = self.sql(expression, "expression") 1101 if value: 1102 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1103 return f"{option} {optional_equal_sign}{value}" 1104 return option 1105 1106 def lateral_op(self, expression: exp.Lateral) -> str: 1107 cross_apply = expression.args.get("cross_apply") 1108 if cross_apply is True: 1109 return "CROSS APPLY" 1110 if cross_apply is False: 1111 return "OUTER APPLY" 1112 1113 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1114 self.unsupported("LATERAL clause is not supported.") 1115 return "LATERAL" 1116 1117 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1118 this = expression.this 1119 split_count = len(this.name.split(".")) 1120 delimiter = expression.args.get("delimiter") 1121 part_index = expression.args.get("part_index") 1122 1123 if ( 1124 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1125 or (delimiter and delimiter.name != ".") 1126 or not part_index 1127 or split_count > 4 1128 ): 1129 self.unsupported( 1130 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1131 ) 1132 return "" 1133 1134 return self.func( 1135 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1136 ) 1137 1138 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1139 nano = expression.args.get("nano") 1140 if nano is not None: 1141 nano.pop() 1142 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1143 1144 if expression.args.get("fractions") is None: 1145 expression.set("fractions", exp.Literal.number(0)) 1146 if expression.args.get("precision") is None: 1147 expression.set("precision", exp.Literal.number(0)) 1148 1149 return rename_func("TIMEFROMPARTS")(self, expression) 1150 1151 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1152 zone = expression.args.get("zone") 1153 if zone is not None: 1154 zone.pop() 1155 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1156 1157 nano = expression.args.get("nano") 1158 if nano is not None: 1159 nano.pop() 1160 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1161 1162 if expression.args.get("milli") is None: 1163 expression.set("milli", exp.Literal.number(0)) 1164 1165 return rename_func("DATETIMEFROMPARTS")(self, expression) 1166 1167 def setitem_sql(self, expression: exp.SetItem) -> str: 1168 this = expression.this 1169 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1170 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1171 return f"{self.sql(this.left)} {self.sql(this.right)}" 1172 1173 return super().setitem_sql(expression) 1174 1175 def boolean_sql(self, expression: exp.Boolean) -> str: 1176 if type(expression.parent) in BIT_TYPES or isinstance( 1177 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1178 ): 1179 return "1" if expression.this else "0" 1180 1181 return "(1 = 1)" if expression.this else "(1 = 0)" 1182 1183 def is_sql(self, expression: exp.Is) -> str: 1184 if isinstance(expression.expression, exp.Boolean): 1185 return self.binary(expression, "=") 1186 return self.binary(expression, "IS") 1187 1188 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1189 sql = self.sql(expression, "this") 1190 properties = expression.args.get("properties") 1191 1192 if sql[:1] != "#" and any( 1193 isinstance(prop, exp.TemporaryProperty) 1194 for prop in (properties.expressions if properties else []) 1195 ): 1196 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1197 1198 return sql 1199 1200 def create_sql(self, expression: exp.Create) -> str: 1201 kind = expression.kind 1202 exists = expression.args.pop("exists", None) 1203 1204 like_property = expression.find(exp.LikeProperty) 1205 if like_property: 1206 ctas_expression = like_property.this 1207 else: 1208 ctas_expression = expression.expression 1209 1210 if kind == "VIEW": 1211 expression.this.set("catalog", None) 1212 with_ = expression.args.get("with") 1213 if ctas_expression and with_: 1214 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1215 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1216 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1217 ctas_expression.set("with", with_.pop()) 1218 1219 table = expression.find(exp.Table) 1220 1221 # Convert CTAS statement to SELECT .. INTO .. 1222 if kind == "TABLE" and ctas_expression: 1223 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1224 ctas_expression = ctas_expression.subquery() 1225 1226 properties = expression.args.get("properties") or exp.Properties() 1227 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1228 1229 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1230 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1231 1232 if like_property: 1233 select_into.limit(0, copy=False) 1234 1235 sql = self.sql(select_into) 1236 else: 1237 sql = super().create_sql(expression) 1238 1239 if exists: 1240 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1241 sql_with_ctes = self.prepend_ctes(expression, sql) 1242 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1243 if kind == "SCHEMA": 1244 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1245 elif kind == "TABLE": 1246 assert table 1247 where = exp.and_( 1248 exp.column("TABLE_NAME").eq(table.name), 1249 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1250 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1251 ) 1252 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1253 elif kind == "INDEX": 1254 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1255 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1256 elif expression.args.get("replace"): 1257 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1258 1259 return self.prepend_ctes(expression, sql) 1260 1261 @generator.unsupported_args("unlogged", "expressions") 1262 def into_sql(self, expression: exp.Into) -> str: 1263 if expression.args.get("temporary"): 1264 # If the Into expression has a temporary property, push this down to the Identifier 1265 table = expression.find(exp.Table) 1266 if table and isinstance(table.this, exp.Identifier): 1267 table.this.set("temporary", True) 1268 1269 return f"{self.seg('INTO')} {self.sql(expression, 'this')}" 1270 1271 def count_sql(self, expression: exp.Count) -> str: 1272 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1273 return rename_func(func_name)(self, expression) 1274 1275 def offset_sql(self, expression: exp.Offset) -> str: 1276 return f"{super().offset_sql(expression)} ROWS" 1277 1278 def version_sql(self, expression: exp.Version) -> str: 1279 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1280 this = f"FOR {name}" 1281 expr = expression.expression 1282 kind = expression.text("kind") 1283 if kind in ("FROM", "BETWEEN"): 1284 args = expr.expressions 1285 sep = "TO" if kind == "FROM" else "AND" 1286 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1287 else: 1288 expr_sql = self.sql(expr) 1289 1290 expr_sql = f" {expr_sql}" if expr_sql else "" 1291 return f"{this} {kind}{expr_sql}" 1292 1293 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1294 table = expression.args.get("table") 1295 table = f"{table} " if table else "" 1296 return f"RETURNS {table}{self.sql(expression, 'this')}" 1297 1298 def returning_sql(self, expression: exp.Returning) -> str: 1299 into = self.sql(expression, "into") 1300 into = self.seg(f"INTO {into}") if into else "" 1301 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1302 1303 def transaction_sql(self, expression: exp.Transaction) -> str: 1304 this = self.sql(expression, "this") 1305 this = f" {this}" if this else "" 1306 mark = self.sql(expression, "mark") 1307 mark = f" WITH MARK {mark}" if mark else "" 1308 return f"BEGIN TRANSACTION{this}{mark}" 1309 1310 def commit_sql(self, expression: exp.Commit) -> str: 1311 this = self.sql(expression, "this") 1312 this = f" {this}" if this else "" 1313 durability = expression.args.get("durability") 1314 durability = ( 1315 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1316 if durability is not None 1317 else "" 1318 ) 1319 return f"COMMIT TRANSACTION{this}{durability}" 1320 1321 def rollback_sql(self, expression: exp.Rollback) -> str: 1322 this = self.sql(expression, "this") 1323 this = f" {this}" if this else "" 1324 return f"ROLLBACK TRANSACTION{this}" 1325 1326 def identifier_sql(self, expression: exp.Identifier) -> str: 1327 identifier = super().identifier_sql(expression) 1328 1329 if expression.args.get("global"): 1330 identifier = f"##{identifier}" 1331 elif expression.args.get("temporary"): 1332 identifier = f"#{identifier}" 1333 1334 return identifier 1335 1336 def constraint_sql(self, expression: exp.Constraint) -> str: 1337 this = self.sql(expression, "this") 1338 expressions = self.expressions(expression, flat=True, sep=" ") 1339 return f"CONSTRAINT {this} {expressions}" 1340 1341 def length_sql(self, expression: exp.Length) -> str: 1342 return self._uncast_text(expression, "LEN") 1343 1344 def right_sql(self, expression: exp.Right) -> str: 1345 return self._uncast_text(expression, "RIGHT") 1346 1347 def left_sql(self, expression: exp.Left) -> str: 1348 return self._uncast_text(expression, "LEFT") 1349 1350 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1351 this = expression.this 1352 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1353 this_sql = self.sql(this, "this") 1354 else: 1355 this_sql = self.sql(this) 1356 expression_sql = self.sql(expression, "expression") 1357 return self.func(name, this_sql, expression_sql if expression_sql else None) 1358 1359 def partition_sql(self, expression: exp.Partition) -> str: 1360 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1361 1362 def alter_sql(self, expression: exp.Alter) -> str: 1363 action = seq_get(expression.args.get("actions") or [], 0) 1364 if isinstance(action, exp.AlterRename): 1365 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1366 return super().alter_sql(expression) 1367 1368 def drop_sql(self, expression: exp.Drop) -> str: 1369 if expression.args["kind"] == "VIEW": 1370 expression.this.set("catalog", None) 1371 return super().drop_sql(expression) 1372 1373 def options_modifier(self, expression: exp.Expression) -> str: 1374 options = self.expressions(expression, key="options") 1375 return f" OPTION{self.wrap(options)}" if options else "" 1376 1377 def dpipe_sql(self, expression: exp.DPipe) -> str: 1378 return self.sql( 1379 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1380 ) 1381 1382 def isascii_sql(self, expression: exp.IsAscii) -> str: 1383 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1384 1385 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1386 this = super().columndef_sql(expression, sep) 1387 default = self.sql(expression, "default") 1388 default = f" = {default}" if default else "" 1389 output = self.sql(expression, "output") 1390 output = f" {output}" if output else "" 1391 return f"{this}{default}{output}" 1392 1393 def coalesce_sql(self, expression: exp.Coalesce) -> str: 1394 func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE" 1395 return rename_func(func_name)(self, expression)
Whether the base comes first in the LOG
function.
Possible values: True
, False
, None
(two arguments are not supported by LOG
)
Whether the behavior of a / b
depends on the types of a
and b
.
False means a / b
is always float division.
True means a / b
is integer division if both a
and b
are integers.
A NULL
arg in CONCAT
yields NULL
by default, but in some dialects it yields an empty string.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python strftime
formats.
521 class Tokenizer(tokens.Tokenizer): 522 IDENTIFIERS = [("[", "]"), '"'] 523 QUOTES = ["'", '"'] 524 HEX_STRINGS = [("0x", ""), ("0X", "")] 525 VAR_SINGLE_TOKENS = {"@", "$", "#"} 526 527 KEYWORDS = { 528 **tokens.Tokenizer.KEYWORDS, 529 "CLUSTERED INDEX": TokenType.INDEX, 530 "DATETIME2": TokenType.DATETIME2, 531 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 532 "DECLARE": TokenType.DECLARE, 533 "EXEC": TokenType.COMMAND, 534 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 535 "GO": TokenType.COMMAND, 536 "IMAGE": TokenType.IMAGE, 537 "MONEY": TokenType.MONEY, 538 "NONCLUSTERED INDEX": TokenType.INDEX, 539 "NTEXT": TokenType.TEXT, 540 "OPTION": TokenType.OPTION, 541 "OUTPUT": TokenType.RETURNING, 542 "PRINT": TokenType.COMMAND, 543 "PROC": TokenType.PROCEDURE, 544 "REAL": TokenType.FLOAT, 545 "ROWVERSION": TokenType.ROWVERSION, 546 "SMALLDATETIME": TokenType.SMALLDATETIME, 547 "SMALLMONEY": TokenType.SMALLMONEY, 548 "SQL_VARIANT": TokenType.VARIANT, 549 "SYSTEM_USER": TokenType.CURRENT_USER, 550 "TOP": TokenType.TOP, 551 "TIMESTAMP": TokenType.ROWVERSION, 552 "TINYINT": TokenType.UTINYINT, 553 "UNIQUEIDENTIFIER": TokenType.UUID, 554 "UPDATE STATISTICS": TokenType.COMMAND, 555 "XML": TokenType.XML, 556 } 557 KEYWORDS.pop("/*+") 558 559 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- use_rs_tokenizer
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
561 class Parser(parser.Parser): 562 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 563 LOG_DEFAULTS_TO_LN = True 564 STRING_ALIASES = True 565 NO_PAREN_IF_COMMANDS = False 566 567 QUERY_MODIFIER_PARSERS = { 568 **parser.Parser.QUERY_MODIFIER_PARSERS, 569 TokenType.OPTION: lambda self: ("options", self._parse_options()), 570 TokenType.FOR: lambda self: ("for", self._parse_for()), 571 } 572 573 # T-SQL does not allow BEGIN to be used as an identifier 574 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 575 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 576 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 577 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 578 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 579 580 FUNCTIONS = { 581 **parser.Parser.FUNCTIONS, 582 "CHARINDEX": lambda args: exp.StrPosition( 583 this=seq_get(args, 1), 584 substr=seq_get(args, 0), 585 position=seq_get(args, 2), 586 ), 587 "COUNT": lambda args: exp.Count( 588 this=seq_get(args, 0), expressions=args[1:], big_int=False 589 ), 590 "COUNT_BIG": lambda args: exp.Count( 591 this=seq_get(args, 0), expressions=args[1:], big_int=True 592 ), 593 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 594 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 595 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 596 "DATEPART": _build_formatted_time(exp.TimeToStr), 597 "DATETIMEFROMPARTS": _build_datetimefromparts, 598 "EOMONTH": _build_eomonth, 599 "FORMAT": _build_format, 600 "GETDATE": exp.CurrentTimestamp.from_arg_list, 601 "HASHBYTES": _build_hashbytes, 602 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 603 "JSON_QUERY": _build_json_query, 604 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 605 "LEN": _build_with_arg_as_text(exp.Length), 606 "LEFT": _build_with_arg_as_text(exp.Left), 607 "NEWID": exp.Uuid.from_arg_list, 608 "RIGHT": _build_with_arg_as_text(exp.Right), 609 "PARSENAME": _build_parsename, 610 "REPLICATE": exp.Repeat.from_arg_list, 611 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 612 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 613 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 614 "SUSER_NAME": exp.CurrentUser.from_arg_list, 615 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 616 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 617 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 618 "TIMEFROMPARTS": _build_timefromparts, 619 "DATETRUNC": _build_datetrunc, 620 } 621 622 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 623 624 PROCEDURE_OPTIONS = dict.fromkeys( 625 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 626 ) 627 628 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 629 630 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 631 TokenType.TABLE, 632 *parser.Parser.TYPE_TOKENS, 633 } 634 635 STATEMENT_PARSERS = { 636 **parser.Parser.STATEMENT_PARSERS, 637 TokenType.DECLARE: lambda self: self._parse_declare(), 638 } 639 640 RANGE_PARSERS = { 641 **parser.Parser.RANGE_PARSERS, 642 TokenType.DCOLON: lambda self, this: self.expression( 643 exp.ScopeResolution, 644 this=this, 645 expression=self._parse_function() or self._parse_var(any_token=True), 646 ), 647 } 648 649 NO_PAREN_FUNCTION_PARSERS = { 650 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 651 "NEXT": lambda self: self._parse_next_value_for(), 652 } 653 654 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 655 COLUMN_OPERATORS = { 656 **parser.Parser.COLUMN_OPERATORS, 657 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 658 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 659 else self.expression(exp.ScopeResolution, this=this, expression=to), 660 } 661 662 SET_OP_MODIFIERS = {"offset"} 663 664 def _parse_alter_table_set(self) -> exp.AlterSet: 665 return self._parse_wrapped(super()._parse_alter_table_set) 666 667 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 668 if self._match(TokenType.MERGE): 669 comments = self._prev_comments 670 merge = self._parse_merge() 671 merge.add_comments(comments, prepend=True) 672 return merge 673 674 return super()._parse_wrapped_select(table=table) 675 676 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 677 # We want to use _parse_types() if the first token after :: is a known type, 678 # otherwise we could parse something like x::varchar(max) into a function 679 if self._match_set(self.TYPE_TOKENS, advance=False): 680 return self._parse_types() 681 682 return self._parse_function() or self._parse_types() 683 684 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 685 if not self._match(TokenType.OPTION): 686 return None 687 688 def _parse_option() -> t.Optional[exp.Expression]: 689 option = self._parse_var_from_options(OPTIONS) 690 if not option: 691 return None 692 693 self._match(TokenType.EQ) 694 return self.expression( 695 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 696 ) 697 698 return self._parse_wrapped_csv(_parse_option) 699 700 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 701 this = self._parse_primary_or_var() 702 if self._match(TokenType.L_PAREN, advance=False): 703 expression = self._parse_wrapped(self._parse_string) 704 else: 705 expression = None 706 707 return exp.XMLKeyValueOption(this=this, expression=expression) 708 709 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 710 if not self._match_pair(TokenType.FOR, TokenType.XML): 711 return None 712 713 def _parse_for_xml() -> t.Optional[exp.Expression]: 714 return self.expression( 715 exp.QueryOption, 716 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 717 or self._parse_xml_key_value_option(), 718 ) 719 720 return self._parse_csv(_parse_for_xml) 721 722 def _parse_projections(self) -> t.List[exp.Expression]: 723 """ 724 T-SQL supports the syntax alias = expression in the SELECT's projection list, 725 so we transform all parsed Selects to convert their EQ projections into Aliases. 726 727 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 728 """ 729 return [ 730 ( 731 exp.alias_(projection.expression, projection.this.this, copy=False) 732 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 733 else projection 734 ) 735 for projection in super()._parse_projections() 736 ] 737 738 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 739 """Applies to SQL Server and Azure SQL Database 740 COMMIT [ { TRAN | TRANSACTION } 741 [ transaction_name | @tran_name_variable ] ] 742 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 743 744 ROLLBACK { TRAN | TRANSACTION } 745 [ transaction_name | @tran_name_variable 746 | savepoint_name | @savepoint_variable ] 747 """ 748 rollback = self._prev.token_type == TokenType.ROLLBACK 749 750 self._match_texts(("TRAN", "TRANSACTION")) 751 this = self._parse_id_var() 752 753 if rollback: 754 return self.expression(exp.Rollback, this=this) 755 756 durability = None 757 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 758 self._match_text_seq("DELAYED_DURABILITY") 759 self._match(TokenType.EQ) 760 761 if self._match_text_seq("OFF"): 762 durability = False 763 else: 764 self._match(TokenType.ON) 765 durability = True 766 767 self._match_r_paren() 768 769 return self.expression(exp.Commit, this=this, durability=durability) 770 771 def _parse_transaction(self) -> exp.Transaction | exp.Command: 772 """Applies to SQL Server and Azure SQL Database 773 BEGIN { TRAN | TRANSACTION } 774 [ { transaction_name | @tran_name_variable } 775 [ WITH MARK [ 'description' ] ] 776 ] 777 """ 778 if self._match_texts(("TRAN", "TRANSACTION")): 779 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 780 if self._match_text_seq("WITH", "MARK"): 781 transaction.set("mark", self._parse_string()) 782 783 return transaction 784 785 return self._parse_as_command(self._prev) 786 787 def _parse_returns(self) -> exp.ReturnsProperty: 788 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 789 returns = super()._parse_returns() 790 returns.set("table", table) 791 return returns 792 793 def _parse_convert( 794 self, strict: bool, safe: t.Optional[bool] = None 795 ) -> t.Optional[exp.Expression]: 796 this = self._parse_types() 797 self._match(TokenType.COMMA) 798 args = [this, *self._parse_csv(self._parse_assignment)] 799 convert = exp.Convert.from_arg_list(args) 800 convert.set("safe", safe) 801 convert.set("strict", strict) 802 return convert 803 804 def _parse_column_def( 805 self, this: t.Optional[exp.Expression], computed_column: bool = True 806 ) -> t.Optional[exp.Expression]: 807 this = super()._parse_column_def(this=this, computed_column=computed_column) 808 if not this: 809 return None 810 if self._match(TokenType.EQ): 811 this.set("default", self._parse_disjunction()) 812 if self._match_texts(self.COLUMN_DEFINITION_MODES): 813 this.set("output", self._prev.text) 814 return this 815 816 def _parse_user_defined_function( 817 self, kind: t.Optional[TokenType] = None 818 ) -> t.Optional[exp.Expression]: 819 this = super()._parse_user_defined_function(kind=kind) 820 821 if ( 822 kind == TokenType.FUNCTION 823 or isinstance(this, exp.UserDefinedFunction) 824 or self._match(TokenType.ALIAS, advance=False) 825 ): 826 return this 827 828 if not self._match(TokenType.WITH, advance=False): 829 expressions = self._parse_csv(self._parse_function_parameter) 830 else: 831 expressions = None 832 833 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 834 835 def _parse_into(self) -> t.Optional[exp.Into]: 836 into = super()._parse_into() 837 838 table = isinstance(into, exp.Into) and into.find(exp.Table) 839 if isinstance(table, exp.Table): 840 table_identifier = table.this 841 if table_identifier.args.get("temporary"): 842 # Promote the temporary property from the Identifier to the Into expression 843 t.cast(exp.Into, into).set("temporary", True) 844 845 return into 846 847 def _parse_id_var( 848 self, 849 any_token: bool = True, 850 tokens: t.Optional[t.Collection[TokenType]] = None, 851 ) -> t.Optional[exp.Expression]: 852 is_temporary = self._match(TokenType.HASH) 853 is_global = is_temporary and self._match(TokenType.HASH) 854 855 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 856 if this: 857 if is_global: 858 this.set("global", True) 859 elif is_temporary: 860 this.set("temporary", True) 861 862 return this 863 864 def _parse_create(self) -> exp.Create | exp.Command: 865 create = super()._parse_create() 866 867 if isinstance(create, exp.Create): 868 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 869 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 870 if not create.args.get("properties"): 871 create.set("properties", exp.Properties(expressions=[])) 872 873 create.args["properties"].append("expressions", exp.TemporaryProperty()) 874 875 return create 876 877 def _parse_if(self) -> t.Optional[exp.Expression]: 878 index = self._index 879 880 if self._match_text_seq("OBJECT_ID"): 881 self._parse_wrapped_csv(self._parse_string) 882 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 883 return self._parse_drop(exists=True) 884 self._retreat(index) 885 886 return super()._parse_if() 887 888 def _parse_unique(self) -> exp.UniqueColumnConstraint: 889 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 890 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 891 else: 892 this = self._parse_schema(self._parse_id_var(any_token=False)) 893 894 return self.expression(exp.UniqueColumnConstraint, this=this) 895 896 def _parse_partition(self) -> t.Optional[exp.Partition]: 897 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 898 return None 899 900 def parse_range(): 901 low = self._parse_bitwise() 902 high = self._parse_bitwise() if self._match_text_seq("TO") else None 903 904 return ( 905 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 906 ) 907 908 partition = self.expression( 909 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 910 ) 911 912 self._match_r_paren() 913 914 return partition 915 916 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 917 var = self._parse_id_var() 918 if not var: 919 return None 920 921 self._match(TokenType.ALIAS) 922 return self.expression( 923 exp.DeclareItem, 924 this=var, 925 kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(), 926 default=self._match(TokenType.EQ) and self._parse_bitwise(), 927 ) 928 929 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 930 expression = super()._parse_alter_table_alter() 931 932 if expression is not None: 933 collation = expression.args.get("collate") 934 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 935 identifier = collation.this 936 collation.set("this", exp.Var(this=identifier.name)) 937 938 return expression
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- COLON_PLACEHOLDER_TOKENS
- ARRAY_CONSTRUCTORS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- CAST_COLUMN_OPERATORS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PIPE_SYNTAX_TRANSFORM_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- FUNCTION_PARSERS
- QUERY_MODIFIER_TOKENS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- WINDOW_EXCLUDE_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- NULL_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ODBC_DATETIME_LITERALS
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- ANALYZE_STYLES
- ANALYZE_EXPRESSION_PARSERS
- PARTITION_KEYWORDS
- AMBIGUOUS_ALIAS_TOKENS
- OPERATION_MODIFIERS
- RECURSIVE_CTE_SEARCH_KIND
- MODIFIABLES
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- OPTIONAL_ALIAS_TOKEN_CTE
- ALTER_RENAME_REQUIRES_COLUMN
- JOINS_HAVE_EQUAL_PRECEDENCE
- ZONE_AWARE_TIMESTAMP_CONSTRUCTOR
- MAP_KEYS_ARE_ARBITRARY_EXPRESSIONS
- JSON_EXTRACT_REQUIRES_JSON_EXPRESSION
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- parse_set_operation
- build_cast
- errors
- sql
940 class Generator(generator.Generator): 941 LIMIT_IS_TOP = True 942 QUERY_HINTS = False 943 RETURNING_END = False 944 NVL2_SUPPORTED = False 945 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 946 LIMIT_FETCH = "FETCH" 947 COMPUTED_COLUMN_WITH_TYPE = False 948 CTE_RECURSIVE_KEYWORD_REQUIRED = False 949 ENSURE_BOOLS = True 950 NULL_ORDERING_SUPPORTED = None 951 SUPPORTS_SINGLE_ARG_CONCAT = False 952 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 953 SUPPORTS_SELECT_INTO = True 954 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 955 SUPPORTS_TO_NUMBER = False 956 SET_OP_MODIFIERS = False 957 COPY_PARAMS_EQ_REQUIRED = True 958 PARSE_JSON_NAME = None 959 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 960 ALTER_SET_WRAPPED = True 961 ALTER_SET_TYPE = "" 962 963 EXPRESSIONS_WITHOUT_NESTED_CTES = { 964 exp.Create, 965 exp.Delete, 966 exp.Insert, 967 exp.Intersect, 968 exp.Except, 969 exp.Merge, 970 exp.Select, 971 exp.Subquery, 972 exp.Union, 973 exp.Update, 974 } 975 976 SUPPORTED_JSON_PATH_PARTS = { 977 exp.JSONPathKey, 978 exp.JSONPathRoot, 979 exp.JSONPathSubscript, 980 } 981 982 TYPE_MAPPING = { 983 **generator.Generator.TYPE_MAPPING, 984 exp.DataType.Type.BOOLEAN: "BIT", 985 exp.DataType.Type.DATETIME2: "DATETIME2", 986 exp.DataType.Type.DECIMAL: "NUMERIC", 987 exp.DataType.Type.DOUBLE: "FLOAT", 988 exp.DataType.Type.INT: "INTEGER", 989 exp.DataType.Type.ROWVERSION: "ROWVERSION", 990 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 991 exp.DataType.Type.TIMESTAMP: "DATETIME2", 992 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 993 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 994 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 995 exp.DataType.Type.UTINYINT: "TINYINT", 996 exp.DataType.Type.VARIANT: "SQL_VARIANT", 997 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 998 } 999 1000 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 1001 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 1002 1003 TRANSFORMS = { 1004 **generator.Generator.TRANSFORMS, 1005 exp.AnyValue: any_value_to_max_sql, 1006 exp.ArrayToString: rename_func("STRING_AGG"), 1007 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 1008 exp.Chr: rename_func("CHAR"), 1009 exp.DateAdd: date_delta_sql("DATEADD"), 1010 exp.DateDiff: date_delta_sql("DATEDIFF"), 1011 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 1012 exp.CurrentDate: rename_func("GETDATE"), 1013 exp.CurrentTimestamp: rename_func("GETDATE"), 1014 exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"), 1015 exp.DateStrToDate: datestrtodate_sql, 1016 exp.Extract: rename_func("DATEPART"), 1017 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 1018 exp.GroupConcat: _string_agg_sql, 1019 exp.If: rename_func("IIF"), 1020 exp.JSONExtract: _json_extract_sql, 1021 exp.JSONExtractScalar: _json_extract_sql, 1022 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1023 exp.Ln: rename_func("LOG"), 1024 exp.Max: max_or_greatest, 1025 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 1026 exp.Min: min_or_least, 1027 exp.NumberToStr: _format_sql, 1028 exp.Repeat: rename_func("REPLICATE"), 1029 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 1030 exp.Select: transforms.preprocess( 1031 [ 1032 transforms.eliminate_distinct_on, 1033 transforms.eliminate_semi_and_anti_joins, 1034 transforms.eliminate_qualify, 1035 transforms.unnest_generate_date_array_using_recursive_cte, 1036 ] 1037 ), 1038 exp.Stddev: rename_func("STDEV"), 1039 exp.StrPosition: lambda self, e: strposition_sql( 1040 self, e, func_name="CHARINDEX", supports_position=True 1041 ), 1042 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 1043 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 1044 exp.SHA2: lambda self, e: self.func( 1045 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 1046 ), 1047 exp.TemporaryProperty: lambda self, e: "", 1048 exp.TimeStrToTime: _timestrtotime_sql, 1049 exp.TimeToStr: _format_sql, 1050 exp.Trim: trim_sql, 1051 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 1052 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 1053 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 1054 exp.Uuid: lambda *_: "NEWID()", 1055 exp.DateFromParts: rename_func("DATEFROMPARTS"), 1056 } 1057 1058 TRANSFORMS.pop(exp.ReturnsProperty) 1059 1060 PROPERTIES_LOCATION = { 1061 **generator.Generator.PROPERTIES_LOCATION, 1062 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 1063 } 1064 1065 def scope_resolution(self, rhs: str, scope_name: str) -> str: 1066 return f"{scope_name}::{rhs}" 1067 1068 def select_sql(self, expression: exp.Select) -> str: 1069 limit = expression.args.get("limit") 1070 offset = expression.args.get("offset") 1071 1072 if isinstance(limit, exp.Fetch) and not offset: 1073 # Dialects like Oracle can FETCH directly from a row set but 1074 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1075 offset = exp.Offset(expression=exp.Literal.number(0)) 1076 expression.set("offset", offset) 1077 1078 if offset: 1079 if not expression.args.get("order"): 1080 # ORDER BY is required in order to use OFFSET in a query, so we use 1081 # a noop order by, since we don't really care about the order. 1082 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1083 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1084 1085 if isinstance(limit, exp.Limit): 1086 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1087 # we replace here because otherwise TOP would be generated in select_sql 1088 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1089 1090 return super().select_sql(expression) 1091 1092 def convert_sql(self, expression: exp.Convert) -> str: 1093 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1094 return self.func( 1095 name, expression.this, expression.expression, expression.args.get("style") 1096 ) 1097 1098 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1099 option = self.sql(expression, "this") 1100 value = self.sql(expression, "expression") 1101 if value: 1102 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1103 return f"{option} {optional_equal_sign}{value}" 1104 return option 1105 1106 def lateral_op(self, expression: exp.Lateral) -> str: 1107 cross_apply = expression.args.get("cross_apply") 1108 if cross_apply is True: 1109 return "CROSS APPLY" 1110 if cross_apply is False: 1111 return "OUTER APPLY" 1112 1113 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1114 self.unsupported("LATERAL clause is not supported.") 1115 return "LATERAL" 1116 1117 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1118 this = expression.this 1119 split_count = len(this.name.split(".")) 1120 delimiter = expression.args.get("delimiter") 1121 part_index = expression.args.get("part_index") 1122 1123 if ( 1124 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1125 or (delimiter and delimiter.name != ".") 1126 or not part_index 1127 or split_count > 4 1128 ): 1129 self.unsupported( 1130 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1131 ) 1132 return "" 1133 1134 return self.func( 1135 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1136 ) 1137 1138 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1139 nano = expression.args.get("nano") 1140 if nano is not None: 1141 nano.pop() 1142 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1143 1144 if expression.args.get("fractions") is None: 1145 expression.set("fractions", exp.Literal.number(0)) 1146 if expression.args.get("precision") is None: 1147 expression.set("precision", exp.Literal.number(0)) 1148 1149 return rename_func("TIMEFROMPARTS")(self, expression) 1150 1151 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1152 zone = expression.args.get("zone") 1153 if zone is not None: 1154 zone.pop() 1155 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1156 1157 nano = expression.args.get("nano") 1158 if nano is not None: 1159 nano.pop() 1160 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1161 1162 if expression.args.get("milli") is None: 1163 expression.set("milli", exp.Literal.number(0)) 1164 1165 return rename_func("DATETIMEFROMPARTS")(self, expression) 1166 1167 def setitem_sql(self, expression: exp.SetItem) -> str: 1168 this = expression.this 1169 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1170 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1171 return f"{self.sql(this.left)} {self.sql(this.right)}" 1172 1173 return super().setitem_sql(expression) 1174 1175 def boolean_sql(self, expression: exp.Boolean) -> str: 1176 if type(expression.parent) in BIT_TYPES or isinstance( 1177 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1178 ): 1179 return "1" if expression.this else "0" 1180 1181 return "(1 = 1)" if expression.this else "(1 = 0)" 1182 1183 def is_sql(self, expression: exp.Is) -> str: 1184 if isinstance(expression.expression, exp.Boolean): 1185 return self.binary(expression, "=") 1186 return self.binary(expression, "IS") 1187 1188 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1189 sql = self.sql(expression, "this") 1190 properties = expression.args.get("properties") 1191 1192 if sql[:1] != "#" and any( 1193 isinstance(prop, exp.TemporaryProperty) 1194 for prop in (properties.expressions if properties else []) 1195 ): 1196 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1197 1198 return sql 1199 1200 def create_sql(self, expression: exp.Create) -> str: 1201 kind = expression.kind 1202 exists = expression.args.pop("exists", None) 1203 1204 like_property = expression.find(exp.LikeProperty) 1205 if like_property: 1206 ctas_expression = like_property.this 1207 else: 1208 ctas_expression = expression.expression 1209 1210 if kind == "VIEW": 1211 expression.this.set("catalog", None) 1212 with_ = expression.args.get("with") 1213 if ctas_expression and with_: 1214 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1215 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1216 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1217 ctas_expression.set("with", with_.pop()) 1218 1219 table = expression.find(exp.Table) 1220 1221 # Convert CTAS statement to SELECT .. INTO .. 1222 if kind == "TABLE" and ctas_expression: 1223 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1224 ctas_expression = ctas_expression.subquery() 1225 1226 properties = expression.args.get("properties") or exp.Properties() 1227 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1228 1229 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1230 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1231 1232 if like_property: 1233 select_into.limit(0, copy=False) 1234 1235 sql = self.sql(select_into) 1236 else: 1237 sql = super().create_sql(expression) 1238 1239 if exists: 1240 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1241 sql_with_ctes = self.prepend_ctes(expression, sql) 1242 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1243 if kind == "SCHEMA": 1244 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1245 elif kind == "TABLE": 1246 assert table 1247 where = exp.and_( 1248 exp.column("TABLE_NAME").eq(table.name), 1249 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1250 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1251 ) 1252 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1253 elif kind == "INDEX": 1254 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1255 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1256 elif expression.args.get("replace"): 1257 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1258 1259 return self.prepend_ctes(expression, sql) 1260 1261 @generator.unsupported_args("unlogged", "expressions") 1262 def into_sql(self, expression: exp.Into) -> str: 1263 if expression.args.get("temporary"): 1264 # If the Into expression has a temporary property, push this down to the Identifier 1265 table = expression.find(exp.Table) 1266 if table and isinstance(table.this, exp.Identifier): 1267 table.this.set("temporary", True) 1268 1269 return f"{self.seg('INTO')} {self.sql(expression, 'this')}" 1270 1271 def count_sql(self, expression: exp.Count) -> str: 1272 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1273 return rename_func(func_name)(self, expression) 1274 1275 def offset_sql(self, expression: exp.Offset) -> str: 1276 return f"{super().offset_sql(expression)} ROWS" 1277 1278 def version_sql(self, expression: exp.Version) -> str: 1279 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1280 this = f"FOR {name}" 1281 expr = expression.expression 1282 kind = expression.text("kind") 1283 if kind in ("FROM", "BETWEEN"): 1284 args = expr.expressions 1285 sep = "TO" if kind == "FROM" else "AND" 1286 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1287 else: 1288 expr_sql = self.sql(expr) 1289 1290 expr_sql = f" {expr_sql}" if expr_sql else "" 1291 return f"{this} {kind}{expr_sql}" 1292 1293 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1294 table = expression.args.get("table") 1295 table = f"{table} " if table else "" 1296 return f"RETURNS {table}{self.sql(expression, 'this')}" 1297 1298 def returning_sql(self, expression: exp.Returning) -> str: 1299 into = self.sql(expression, "into") 1300 into = self.seg(f"INTO {into}") if into else "" 1301 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1302 1303 def transaction_sql(self, expression: exp.Transaction) -> str: 1304 this = self.sql(expression, "this") 1305 this = f" {this}" if this else "" 1306 mark = self.sql(expression, "mark") 1307 mark = f" WITH MARK {mark}" if mark else "" 1308 return f"BEGIN TRANSACTION{this}{mark}" 1309 1310 def commit_sql(self, expression: exp.Commit) -> str: 1311 this = self.sql(expression, "this") 1312 this = f" {this}" if this else "" 1313 durability = expression.args.get("durability") 1314 durability = ( 1315 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1316 if durability is not None 1317 else "" 1318 ) 1319 return f"COMMIT TRANSACTION{this}{durability}" 1320 1321 def rollback_sql(self, expression: exp.Rollback) -> str: 1322 this = self.sql(expression, "this") 1323 this = f" {this}" if this else "" 1324 return f"ROLLBACK TRANSACTION{this}" 1325 1326 def identifier_sql(self, expression: exp.Identifier) -> str: 1327 identifier = super().identifier_sql(expression) 1328 1329 if expression.args.get("global"): 1330 identifier = f"##{identifier}" 1331 elif expression.args.get("temporary"): 1332 identifier = f"#{identifier}" 1333 1334 return identifier 1335 1336 def constraint_sql(self, expression: exp.Constraint) -> str: 1337 this = self.sql(expression, "this") 1338 expressions = self.expressions(expression, flat=True, sep=" ") 1339 return f"CONSTRAINT {this} {expressions}" 1340 1341 def length_sql(self, expression: exp.Length) -> str: 1342 return self._uncast_text(expression, "LEN") 1343 1344 def right_sql(self, expression: exp.Right) -> str: 1345 return self._uncast_text(expression, "RIGHT") 1346 1347 def left_sql(self, expression: exp.Left) -> str: 1348 return self._uncast_text(expression, "LEFT") 1349 1350 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1351 this = expression.this 1352 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1353 this_sql = self.sql(this, "this") 1354 else: 1355 this_sql = self.sql(this) 1356 expression_sql = self.sql(expression, "expression") 1357 return self.func(name, this_sql, expression_sql if expression_sql else None) 1358 1359 def partition_sql(self, expression: exp.Partition) -> str: 1360 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1361 1362 def alter_sql(self, expression: exp.Alter) -> str: 1363 action = seq_get(expression.args.get("actions") or [], 0) 1364 if isinstance(action, exp.AlterRename): 1365 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1366 return super().alter_sql(expression) 1367 1368 def drop_sql(self, expression: exp.Drop) -> str: 1369 if expression.args["kind"] == "VIEW": 1370 expression.this.set("catalog", None) 1371 return super().drop_sql(expression) 1372 1373 def options_modifier(self, expression: exp.Expression) -> str: 1374 options = self.expressions(expression, key="options") 1375 return f" OPTION{self.wrap(options)}" if options else "" 1376 1377 def dpipe_sql(self, expression: exp.DPipe) -> str: 1378 return self.sql( 1379 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1380 ) 1381 1382 def isascii_sql(self, expression: exp.IsAscii) -> str: 1383 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1384 1385 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1386 this = super().columndef_sql(expression, sep) 1387 default = self.sql(expression, "default") 1388 default = f" = {default}" if default else "" 1389 output = self.sql(expression, "output") 1390 output = f" {output}" if output else "" 1391 return f"{this}{default}{output}" 1392 1393 def coalesce_sql(self, expression: exp.Coalesce) -> str: 1394 func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE" 1395 return rename_func(func_name)(self, expression)
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the
indentation of subqueries and filters under a
WHERE
clause. Default: 2. - normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
1068 def select_sql(self, expression: exp.Select) -> str: 1069 limit = expression.args.get("limit") 1070 offset = expression.args.get("offset") 1071 1072 if isinstance(limit, exp.Fetch) and not offset: 1073 # Dialects like Oracle can FETCH directly from a row set but 1074 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1075 offset = exp.Offset(expression=exp.Literal.number(0)) 1076 expression.set("offset", offset) 1077 1078 if offset: 1079 if not expression.args.get("order"): 1080 # ORDER BY is required in order to use OFFSET in a query, so we use 1081 # a noop order by, since we don't really care about the order. 1082 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1083 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1084 1085 if isinstance(limit, exp.Limit): 1086 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1087 # we replace here because otherwise TOP would be generated in select_sql 1088 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1089 1090 return super().select_sql(expression)
1098 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1099 option = self.sql(expression, "this") 1100 value = self.sql(expression, "expression") 1101 if value: 1102 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1103 return f"{option} {optional_equal_sign}{value}" 1104 return option
1106 def lateral_op(self, expression: exp.Lateral) -> str: 1107 cross_apply = expression.args.get("cross_apply") 1108 if cross_apply is True: 1109 return "CROSS APPLY" 1110 if cross_apply is False: 1111 return "OUTER APPLY" 1112 1113 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1114 self.unsupported("LATERAL clause is not supported.") 1115 return "LATERAL"
1117 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1118 this = expression.this 1119 split_count = len(this.name.split(".")) 1120 delimiter = expression.args.get("delimiter") 1121 part_index = expression.args.get("part_index") 1122 1123 if ( 1124 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1125 or (delimiter and delimiter.name != ".") 1126 or not part_index 1127 or split_count > 4 1128 ): 1129 self.unsupported( 1130 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1131 ) 1132 return "" 1133 1134 return self.func( 1135 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1136 )
1138 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1139 nano = expression.args.get("nano") 1140 if nano is not None: 1141 nano.pop() 1142 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1143 1144 if expression.args.get("fractions") is None: 1145 expression.set("fractions", exp.Literal.number(0)) 1146 if expression.args.get("precision") is None: 1147 expression.set("precision", exp.Literal.number(0)) 1148 1149 return rename_func("TIMEFROMPARTS")(self, expression)
1151 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1152 zone = expression.args.get("zone") 1153 if zone is not None: 1154 zone.pop() 1155 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1156 1157 nano = expression.args.get("nano") 1158 if nano is not None: 1159 nano.pop() 1160 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1161 1162 if expression.args.get("milli") is None: 1163 expression.set("milli", exp.Literal.number(0)) 1164 1165 return rename_func("DATETIMEFROMPARTS")(self, expression)
1167 def setitem_sql(self, expression: exp.SetItem) -> str: 1168 this = expression.this 1169 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1170 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1171 return f"{self.sql(this.left)} {self.sql(this.right)}" 1172 1173 return super().setitem_sql(expression)
1188 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1189 sql = self.sql(expression, "this") 1190 properties = expression.args.get("properties") 1191 1192 if sql[:1] != "#" and any( 1193 isinstance(prop, exp.TemporaryProperty) 1194 for prop in (properties.expressions if properties else []) 1195 ): 1196 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1197 1198 return sql
1200 def create_sql(self, expression: exp.Create) -> str: 1201 kind = expression.kind 1202 exists = expression.args.pop("exists", None) 1203 1204 like_property = expression.find(exp.LikeProperty) 1205 if like_property: 1206 ctas_expression = like_property.this 1207 else: 1208 ctas_expression = expression.expression 1209 1210 if kind == "VIEW": 1211 expression.this.set("catalog", None) 1212 with_ = expression.args.get("with") 1213 if ctas_expression and with_: 1214 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1215 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1216 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1217 ctas_expression.set("with", with_.pop()) 1218 1219 table = expression.find(exp.Table) 1220 1221 # Convert CTAS statement to SELECT .. INTO .. 1222 if kind == "TABLE" and ctas_expression: 1223 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1224 ctas_expression = ctas_expression.subquery() 1225 1226 properties = expression.args.get("properties") or exp.Properties() 1227 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1228 1229 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1230 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1231 1232 if like_property: 1233 select_into.limit(0, copy=False) 1234 1235 sql = self.sql(select_into) 1236 else: 1237 sql = super().create_sql(expression) 1238 1239 if exists: 1240 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1241 sql_with_ctes = self.prepend_ctes(expression, sql) 1242 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1243 if kind == "SCHEMA": 1244 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1245 elif kind == "TABLE": 1246 assert table 1247 where = exp.and_( 1248 exp.column("TABLE_NAME").eq(table.name), 1249 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1250 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1251 ) 1252 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1253 elif kind == "INDEX": 1254 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1255 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1256 elif expression.args.get("replace"): 1257 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1258 1259 return self.prepend_ctes(expression, sql)
1261 @generator.unsupported_args("unlogged", "expressions") 1262 def into_sql(self, expression: exp.Into) -> str: 1263 if expression.args.get("temporary"): 1264 # If the Into expression has a temporary property, push this down to the Identifier 1265 table = expression.find(exp.Table) 1266 if table and isinstance(table.this, exp.Identifier): 1267 table.this.set("temporary", True) 1268 1269 return f"{self.seg('INTO')} {self.sql(expression, 'this')}"
1278 def version_sql(self, expression: exp.Version) -> str: 1279 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1280 this = f"FOR {name}" 1281 expr = expression.expression 1282 kind = expression.text("kind") 1283 if kind in ("FROM", "BETWEEN"): 1284 args = expr.expressions 1285 sep = "TO" if kind == "FROM" else "AND" 1286 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1287 else: 1288 expr_sql = self.sql(expr) 1289 1290 expr_sql = f" {expr_sql}" if expr_sql else "" 1291 return f"{this} {kind}{expr_sql}"
1310 def commit_sql(self, expression: exp.Commit) -> str: 1311 this = self.sql(expression, "this") 1312 this = f" {this}" if this else "" 1313 durability = expression.args.get("durability") 1314 durability = ( 1315 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1316 if durability is not None 1317 else "" 1318 ) 1319 return f"COMMIT TRANSACTION{this}{durability}"
1326 def identifier_sql(self, expression: exp.Identifier) -> str: 1327 identifier = super().identifier_sql(expression) 1328 1329 if expression.args.get("global"): 1330 identifier = f"##{identifier}" 1331 elif expression.args.get("temporary"): 1332 identifier = f"#{identifier}" 1333 1334 return identifier
1385 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1386 this = super().columndef_sql(expression, sep) 1387 default = self.sql(expression, "default") 1388 default = f" = {default}" if default else "" 1389 output = self.sql(expression, "output") 1390 output = f" {output}" if output else "" 1391 return f"{this}{default}{output}"
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- SUPPORTS_WINDOW_EXCLUDE
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- SUPPORTS_MEDIAN
- SUPPORTS_UNIX_SECONDS
- NORMALIZE_EXTRACT_DATE_PARTS
- ARRAY_SIZE_NAME
- ARRAY_SIZE_DIM_REQUIRED
- SUPPORTS_BETWEEN_FLAGS
- SUPPORTS_LIKE_QUANTIFIERS
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS
- SAFE_JSON_PATH_KEY_RE
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- sanitize_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- columnposition_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- limitoptions_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablefromrows_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- from_sql
- groupingsets_sql
- rollup_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- queryband_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- for_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- formatphrase_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterindex_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- addpartition_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- safedivide_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- like_sql
- ilike_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- jsoncast_sql
- try_sql
- log_sql
- use_sql
- binary
- ceil_floor
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- whens_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- uniquekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- generateembedding_sql
- featuresattime_sql
- vectorsearch_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodatetime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- arrayconcat_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonextractquote_sql
- jsonexists_sql
- arrayagg_sql
- apply_sql
- grant_sql
- revoke_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql
- string_sql
- median_sql
- overflowtruncatebehavior_sql
- unixseconds_sql
- arraysize_sql
- attach_sql
- detach_sql
- attachoption_sql
- watermarkcolumnconstraint_sql
- encodeproperty_sql
- includeproperty_sql
- xmlelement_sql
- xmlkeyvalueoption_sql
- partitionbyrangeproperty_sql
- partitionbyrangepropertydynamic_sql
- unpivotcolumns_sql
- analyzesample_sql
- analyzestatistics_sql
- analyzehistogram_sql
- analyzedelete_sql
- analyzelistchainedrows_sql
- analyzevalidate_sql
- analyze_sql
- xmltable_sql
- xmlnamespace_sql
- export_sql
- declare_sql
- declareitem_sql
- recursivewithsearch_sql
- parameterizedagg_sql
- anonymousaggfunc_sql
- combinedaggfunc_sql
- combinedparameterizedagg_sql
- show_sql
- get_put_sql
- translatecharacters_sql
- decodecase_sql
- semanticview_sql
- getextract_sql
- datefromunixdate_sql
- space_sql
- buildproperty_sql
- refreshtriggerproperty_sql