# sqlglot.dialects.tsql — T-SQL (Microsoft SQL Server / Azure SQL) dialect.
from __future__ import annotations

import datetime
import re
import typing as t
from functools import reduce

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    NormalizationStrategy,
    any_value_to_max_sql,
    build_date_delta,
    date_delta_sql,
    datestrtodate_sql,
    generatedasidentitycolumnconstraint_sql,
    max_or_greatest,
    min_or_least,
    rename_func,
    strposition_sql,
    timestrtotime_sql,
    trim_sql,
)
from sqlglot.helper import seq_get
from sqlglot.parser import build_coalesce
from sqlglot.time import format_time
from sqlglot.tokens import TokenType

if t.TYPE_CHECKING:
    from sqlglot._typing import E

# T-SQL date-part names/abbreviations that map to "spelled-out" strftime codes
# (e.g. DATENAME(month, ...) -> 'January'), used on top of TSQL.TIME_MAPPING.
FULL_FORMAT_TIME_MAPPING = {
    "weekday": "%A",
    "dw": "%A",
    "w": "%A",
    "month": "%B",
    "mm": "%B",
    "m": "%B",
}

# Canonicalizes the many T-SQL date-part abbreviations to a single unit name,
# e.g. DATEADD(yy, ...), DATEADD(yyyy, ...) and DATEADD(year, ...) -> "year".
DATE_DELTA_INTERVAL = {
    "year": "year",
    "yyyy": "year",
    "yy": "year",
    "quarter": "quarter",
    "qq": "quarter",
    "q": "quarter",
    "month": "month",
    "mm": "month",
    "m": "month",
    "week": "week",
    "ww": "week",
    "wk": "week",
    "day": "day",
    "dd": "day",
    "d": "day",
}


# Matches date/time tokens (day, month, year, hour, second) inside a FORMAT()
# format string; used to decide whether a format is numeric or temporal.
DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})")

# N = Numeric, C=Currency
TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"}

# T-SQL's epoch for integer DATETIME values: day 0 is 1900-01-01.
DEFAULT_START_DATE = datetime.date(1900, 1, 1)

BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias}

# Unsupported options:
# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] )
# - TABLE HINT
OPTIONS: parser.OPTIONS_TYPE = {
    **dict.fromkeys(
        (
            "DISABLE_OPTIMIZED_PLAN_FORCING",
            "FAST",
            "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX",
            "LABEL",
            "MAXDOP",
            "MAXRECURSION",
            "MAX_GRANT_PERCENT",
            "MIN_GRANT_PERCENT",
            "NO_PERFORMANCE_SPOOL",
            "QUERYTRACEON",
            "RECOMPILE",
        ),
        tuple(),
    ),
    "CONCAT": ("UNION",),
    "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"),
    "EXPAND": ("VIEWS",),
    "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"),
    "HASH": ("GROUP", "JOIN", "UNION"),
    "KEEP": ("PLAN",),
    "KEEPFIXED": ("PLAN",),
    "LOOP": ("JOIN",),
    "MERGE": ("JOIN", "UNION"),
    "OPTIMIZE": (("FOR", "UNKNOWN"),),
    "ORDER": ("GROUP",),
    "PARAMETERIZATION": ("FORCED", "SIMPLE"),
    "ROBUST": ("PLAN",),
    "USE": ("PLAN",),
}


# Options accepted by the FOR XML query clause.
XML_OPTIONS: parser.OPTIONS_TYPE = {
    **dict.fromkeys(
        (
            "AUTO",
            "EXPLICIT",
            "TYPE",
        ),
        tuple(),
    ),
    "ELEMENTS": (
        "XSINIL",
        "ABSENT",
    ),
    "BINARY": ("BASE64",),
}


OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL")


def _build_formatted_time(
    exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None
) -> t.Callable[[t.List], E]:
    """Return a builder that parses (format, value) args into `exp_class`.

    The T-SQL format token (first arg) is translated to a strftime-style
    format; the value (second arg) is cast to DATETIME2 so downstream
    generators treat it as a timestamp.
    """

    def _builder(args: t.List) -> E:
        fmt = seq_get(args, 0)
        if isinstance(fmt, exp.Expression):
            fmt = exp.Literal.string(
                format_time(
                    fmt.name.lower(),
                    (
                        {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING}
                        if full_format_mapping
                        else TSQL.TIME_MAPPING
                    ),
                )
            )

        this = seq_get(args, 1)
        if isinstance(this, exp.Expression):
            this = exp.cast(this, exp.DataType.Type.DATETIME2)

        return exp_class(this=this, format=fmt)

    return _builder


def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr:
    """Parse T-SQL FORMAT(value, format[, culture]).

    A format is treated as numeric when it is a known numeric format ('N',
    'C') or contains no date/time tokens; otherwise it is a time format.
    """
    this = seq_get(args, 0)
    fmt = seq_get(args, 1)
    culture = seq_get(args, 2)

    number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name))

    if number_fmt:
        return exp.NumberToStr(this=this, format=fmt, culture=culture)

    if fmt:
        fmt = exp.Literal.string(
            # Single-char formats are .NET "standard" formats and use a dedicated mapping.
            format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING)
            if len(fmt.name) == 1
            else format_time(fmt.name, TSQL.TIME_MAPPING)
        )

    return exp.TimeToStr(this=this, format=fmt, culture=culture)
def _build_eomonth(args: t.List) -> exp.LastDay:
    """Parse EOMONTH(start_date[, month_to_add]) into a LastDay expression."""
    date = exp.TsOrDsToDate(this=seq_get(args, 0))
    month_lag = seq_get(args, 1)

    if month_lag is None:
        this: exp.Expression = date
    else:
        unit = DATE_DELTA_INTERVAL.get("month")
        this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit))

    return exp.LastDay(this=this)


def _build_hashbytes(args: t.List) -> exp.Expression:
    """Parse HASHBYTES(algorithm, data) into the matching hash expression.

    Falls back to an anonymous HASHBYTES call when the algorithm literal is
    not one we can represent with a dedicated node.
    """
    kind, data = args
    kind = kind.name.upper() if kind.is_string else ""

    if kind == "MD5":
        args.pop(0)
        return exp.MD5(this=data)
    if kind in ("SHA", "SHA1"):
        args.pop(0)
        return exp.SHA(this=data)
    if kind == "SHA2_256":
        return exp.SHA2(this=data, length=exp.Literal.number(256))
    if kind == "SHA2_512":
        return exp.SHA2(this=data, length=exp.Literal.number(512))

    return exp.func("HASHBYTES", *args)


# Format names that can only be produced with DATEPART, not FORMAT.
DATEPART_ONLY_FORMATS = {"DW", "WK", "HOUR", "QUARTER"}


def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str:
    """Generate T-SQL FORMAT(...) (or DATEPART for formats FORMAT can't express)."""
    fmt = expression.args["format"]

    if not isinstance(expression, exp.NumberToStr):
        if fmt.is_string:
            mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING)

            name = (mapped_fmt or "").upper()
            if name in DATEPART_ONLY_FORMATS:
                return self.func("DATEPART", name, expression.this)

            fmt_sql = self.sql(exp.Literal.string(mapped_fmt))
        else:
            fmt_sql = self.format_time(expression) or self.sql(fmt)
    else:
        fmt_sql = self.sql(fmt)

    return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture"))


def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str:
    """Generate STRING_AGG(...) [WITHIN GROUP (ORDER BY ...)] for GroupConcat."""
    this = expression.this
    distinct = expression.find(exp.Distinct)
    if distinct:
        # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression
        self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.")
        this = distinct.pop().expressions[0]

    order = ""
    if isinstance(expression.this, exp.Order):
        if expression.this.this:
            this = expression.this.this.pop()
        # Order has a leading space
        order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})"

    separator = expression.args.get("separator") or exp.Literal.string(",")
    return f"STRING_AGG({self.format_args(this, separator)}){order}"


def _build_date_delta(
    exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None
) -> t.Callable[[t.List], E]:
    """Return a builder for DATEDIFF-style functions: (unit, start, end).

    Integer start dates are converted to concrete dates relative to T-SQL's
    1900-01-01 epoch so the expression transpiles correctly.
    """

    def _builder(args: t.List) -> E:
        unit = seq_get(args, 0)
        if unit and unit_mapping:
            unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name))

        start_date = seq_get(args, 1)
        if start_date and start_date.is_number:
            # Numeric types are valid DATETIME values
            if start_date.is_int:
                adds = DEFAULT_START_DATE + datetime.timedelta(days=start_date.to_py())
                start_date = exp.Literal.string(adds.strftime("%F"))
            else:
                # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs.
                # This is not a problem when generating T-SQL code, it is when transpiling to other dialects.
                return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit)

        return exp_class(
            this=exp.TimeStrToTime(this=seq_get(args, 2)),
            expression=exp.TimeStrToTime(this=start_date),
            unit=unit,
        )

    return _builder


def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression


# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts:
    return exp.TimestampFromParts(
        year=seq_get(args, 0),
        month=seq_get(args, 1),
        day=seq_get(args, 2),
        hour=seq_get(args, 3),
        min=seq_get(args, 4),
        sec=seq_get(args, 5),
        milli=seq_get(args, 6),
    )


# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_timefromparts(args: t.List) -> exp.TimeFromParts:
    return exp.TimeFromParts(
        hour=seq_get(args, 0),
        min=seq_get(args, 1),
        sec=seq_get(args, 2),
        fractions=seq_get(args, 3),
        precision=seq_get(args, 4),
    )


def _build_with_arg_as_text(
    klass: t.Type[exp.Expression],
) -> t.Callable[[t.List[exp.Expression]], exp.Expression]:
    """Return a builder that casts the first argument to TEXT when needed.

    Used for functions like LEN/LEFT/RIGHT whose argument T-SQL implicitly
    treats as a string.
    """

    def _parse(args: t.List[exp.Expression]) -> exp.Expression:
        this = seq_get(args, 0)

        if this and not this.is_string:
            this = exp.cast(this, exp.DataType.Type.TEXT)

        expression = seq_get(args, 1)
        kwargs = {"this": this}

        if expression:
            kwargs["expression"] = expression

        return klass(**kwargs)

    return _parse


# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16
def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous:
    # PARSENAME(...) will be stored into exp.SplitPart if:
    # - All args are literals
    # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL)
    if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args):
        this = args[0]
        part_index = args[1]
        split_count = len(this.name.split("."))
        if split_count <= 4:
            return exp.SplitPart(
                this=this,
                delimiter=exp.Literal.string("."),
                # PARSENAME indexes parts right-to-left; SPLIT_PART left-to-right.
                part_index=exp.Literal.number(split_count + 1 - part_index.to_py()),
            )

    return exp.Anonymous(this="PARSENAME", expressions=args)


def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract:
    if len(args) == 1:
        # The default value for path is '$'. As a result, if you don't provide a
        # value for path, JSON_QUERY returns the input expression.
        args.append(exp.Literal.string("$"))

    return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect)
372 args.append(exp.Literal.string("$")) 373 374 return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect) 375 376 377def _json_extract_sql( 378 self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar 379) -> str: 380 json_query = self.func("JSON_QUERY", expression.this, expression.expression) 381 json_value = self.func("JSON_VALUE", expression.this, expression.expression) 382 return self.func("ISNULL", json_query, json_value) 383 384 385def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime): 386 sql = timestrtotime_sql(self, expression) 387 if expression.args.get("zone"): 388 # If there is a timezone, produce an expression like: 389 # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC' 390 # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information 391 return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC"))) 392 return sql 393 394 395def _build_datetrunc(args: t.List) -> exp.TimestampTrunc: 396 unit = seq_get(args, 0) 397 this = seq_get(args, 1) 398 399 if this and this.is_string: 400 this = exp.cast(this, exp.DataType.Type.DATETIME2) 401 402 return exp.TimestampTrunc(this=this, unit=unit) 403 404 405class TSQL(Dialect): 406 SUPPORTS_SEMI_ANTI_JOIN = False 407 LOG_BASE_FIRST = False 408 TYPED_DIVISION = True 409 CONCAT_COALESCE = True 410 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 411 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 412 413 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 414 415 TIME_MAPPING = { 416 "year": "%Y", 417 "dayofyear": "%j", 418 "day": "%d", 419 "dy": "%d", 420 "y": "%Y", 421 "week": "%W", 422 "ww": "%W", 423 "wk": "%W", 424 "hour": "%h", 425 "hh": "%I", 426 "minute": "%M", 427 "mi": "%M", 428 "n": "%M", 429 "second": "%S", 430 "ss": "%S", 431 "s": "%-S", 432 "millisecond": "%f", 433 "ms": "%f", 434 "weekday": "%w", 435 "dw": "%w", 436 "month": "%m", 437 "mm": 
"%M", 438 "m": "%-M", 439 "Y": "%Y", 440 "YYYY": "%Y", 441 "YY": "%y", 442 "MMMM": "%B", 443 "MMM": "%b", 444 "MM": "%m", 445 "M": "%-m", 446 "dddd": "%A", 447 "dd": "%d", 448 "d": "%-d", 449 "HH": "%H", 450 "H": "%-H", 451 "h": "%-I", 452 "ffffff": "%f", 453 "yyyy": "%Y", 454 "yy": "%y", 455 } 456 457 CONVERT_FORMAT_MAPPING = { 458 "0": "%b %d %Y %-I:%M%p", 459 "1": "%m/%d/%y", 460 "2": "%y.%m.%d", 461 "3": "%d/%m/%y", 462 "4": "%d.%m.%y", 463 "5": "%d-%m-%y", 464 "6": "%d %b %y", 465 "7": "%b %d, %y", 466 "8": "%H:%M:%S", 467 "9": "%b %d %Y %-I:%M:%S:%f%p", 468 "10": "mm-dd-yy", 469 "11": "yy/mm/dd", 470 "12": "yymmdd", 471 "13": "%d %b %Y %H:%M:ss:%f", 472 "14": "%H:%M:%S:%f", 473 "20": "%Y-%m-%d %H:%M:%S", 474 "21": "%Y-%m-%d %H:%M:%S.%f", 475 "22": "%m/%d/%y %-I:%M:%S %p", 476 "23": "%Y-%m-%d", 477 "24": "%H:%M:%S", 478 "25": "%Y-%m-%d %H:%M:%S.%f", 479 "100": "%b %d %Y %-I:%M%p", 480 "101": "%m/%d/%Y", 481 "102": "%Y.%m.%d", 482 "103": "%d/%m/%Y", 483 "104": "%d.%m.%Y", 484 "105": "%d-%m-%Y", 485 "106": "%d %b %Y", 486 "107": "%b %d, %Y", 487 "108": "%H:%M:%S", 488 "109": "%b %d %Y %-I:%M:%S:%f%p", 489 "110": "%m-%d-%Y", 490 "111": "%Y/%m/%d", 491 "112": "%Y%m%d", 492 "113": "%d %b %Y %H:%M:%S:%f", 493 "114": "%H:%M:%S:%f", 494 "120": "%Y-%m-%d %H:%M:%S", 495 "121": "%Y-%m-%d %H:%M:%S.%f", 496 "126": "%Y-%m-%dT%H:%M:%S.%f", 497 } 498 499 FORMAT_TIME_MAPPING = { 500 "y": "%B %Y", 501 "d": "%m/%d/%Y", 502 "H": "%-H", 503 "h": "%-I", 504 "s": "%Y-%m-%d %H:%M:%S", 505 "D": "%A,%B,%Y", 506 "f": "%A,%B,%Y %-I:%M %p", 507 "F": "%A,%B,%Y %-I:%M:%S %p", 508 "g": "%m/%d/%Y %-I:%M %p", 509 "G": "%m/%d/%Y %-I:%M:%S %p", 510 "M": "%B %-d", 511 "m": "%B %-d", 512 "O": "%Y-%m-%dT%H:%M:%S", 513 "u": "%Y-%M-%D %H:%M:%S%z", 514 "U": "%A, %B %D, %Y %H:%M:%S%z", 515 "T": "%-I:%M:%S %p", 516 "t": "%-I:%M", 517 "Y": "%a %Y", 518 } 519 520 class Tokenizer(tokens.Tokenizer): 521 IDENTIFIERS = [("[", "]"), '"'] 522 QUOTES = ["'", '"'] 523 HEX_STRINGS = [("0x", ""), ("0X", "")] 524 
VAR_SINGLE_TOKENS = {"@", "$", "#"} 525 526 KEYWORDS = { 527 **tokens.Tokenizer.KEYWORDS, 528 "CLUSTERED INDEX": TokenType.INDEX, 529 "DATETIME2": TokenType.DATETIME2, 530 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 531 "DECLARE": TokenType.DECLARE, 532 "EXEC": TokenType.COMMAND, 533 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 534 "GO": TokenType.COMMAND, 535 "IMAGE": TokenType.IMAGE, 536 "MONEY": TokenType.MONEY, 537 "NONCLUSTERED INDEX": TokenType.INDEX, 538 "NTEXT": TokenType.TEXT, 539 "OPTION": TokenType.OPTION, 540 "OUTPUT": TokenType.RETURNING, 541 "PRINT": TokenType.COMMAND, 542 "PROC": TokenType.PROCEDURE, 543 "REAL": TokenType.FLOAT, 544 "ROWVERSION": TokenType.ROWVERSION, 545 "SMALLDATETIME": TokenType.SMALLDATETIME, 546 "SMALLMONEY": TokenType.SMALLMONEY, 547 "SQL_VARIANT": TokenType.VARIANT, 548 "SYSTEM_USER": TokenType.CURRENT_USER, 549 "TOP": TokenType.TOP, 550 "TIMESTAMP": TokenType.ROWVERSION, 551 "TINYINT": TokenType.UTINYINT, 552 "UNIQUEIDENTIFIER": TokenType.UUID, 553 "UPDATE STATISTICS": TokenType.COMMAND, 554 "XML": TokenType.XML, 555 } 556 KEYWORDS.pop("/*+") 557 558 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 559 560 class Parser(parser.Parser): 561 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 562 LOG_DEFAULTS_TO_LN = True 563 STRING_ALIASES = True 564 NO_PAREN_IF_COMMANDS = False 565 566 QUERY_MODIFIER_PARSERS = { 567 **parser.Parser.QUERY_MODIFIER_PARSERS, 568 TokenType.OPTION: lambda self: ("options", self._parse_options()), 569 TokenType.FOR: lambda self: ("for", self._parse_for()), 570 } 571 572 # T-SQL does not allow BEGIN to be used as an identifier 573 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 574 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 575 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 576 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 577 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - 
{TokenType.BEGIN} 578 579 FUNCTIONS = { 580 **parser.Parser.FUNCTIONS, 581 "CHARINDEX": lambda args: exp.StrPosition( 582 this=seq_get(args, 1), 583 substr=seq_get(args, 0), 584 position=seq_get(args, 2), 585 ), 586 "COUNT": lambda args: exp.Count( 587 this=seq_get(args, 0), expressions=args[1:], big_int=False 588 ), 589 "COUNT_BIG": lambda args: exp.Count( 590 this=seq_get(args, 0), expressions=args[1:], big_int=True 591 ), 592 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 593 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 594 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 595 "DATEPART": _build_formatted_time(exp.TimeToStr), 596 "DATETIMEFROMPARTS": _build_datetimefromparts, 597 "EOMONTH": _build_eomonth, 598 "FORMAT": _build_format, 599 "GETDATE": exp.CurrentTimestamp.from_arg_list, 600 "HASHBYTES": _build_hashbytes, 601 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 602 "JSON_QUERY": _build_json_query, 603 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 604 "LEN": _build_with_arg_as_text(exp.Length), 605 "LEFT": _build_with_arg_as_text(exp.Left), 606 "NEWID": exp.Uuid.from_arg_list, 607 "RIGHT": _build_with_arg_as_text(exp.Right), 608 "PARSENAME": _build_parsename, 609 "REPLICATE": exp.Repeat.from_arg_list, 610 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 611 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 612 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 613 "SUSER_NAME": exp.CurrentUser.from_arg_list, 614 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 615 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 616 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 617 "TIMEFROMPARTS": _build_timefromparts, 618 "DATETRUNC": _build_datetrunc, 619 } 620 621 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 622 623 PROCEDURE_OPTIONS = dict.fromkeys( 624 ("ENCRYPTION", "RECOMPILE", 
"SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 625 ) 626 627 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 628 629 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 630 TokenType.TABLE, 631 *parser.Parser.TYPE_TOKENS, 632 } 633 634 STATEMENT_PARSERS = { 635 **parser.Parser.STATEMENT_PARSERS, 636 TokenType.DECLARE: lambda self: self._parse_declare(), 637 } 638 639 RANGE_PARSERS = { 640 **parser.Parser.RANGE_PARSERS, 641 TokenType.DCOLON: lambda self, this: self.expression( 642 exp.ScopeResolution, 643 this=this, 644 expression=self._parse_function() or self._parse_var(any_token=True), 645 ), 646 } 647 648 NO_PAREN_FUNCTION_PARSERS = { 649 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 650 "NEXT": lambda self: self._parse_next_value_for(), 651 } 652 653 FUNCTION_PARSERS: t.Dict[str, t.Callable] = { 654 **parser.Parser.FUNCTION_PARSERS, 655 "JSON_ARRAYAGG": lambda self: self.expression( 656 exp.JSONArrayAgg, 657 this=self._parse_bitwise(), 658 order=self._parse_order(), 659 null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"), 660 ), 661 } 662 663 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 664 COLUMN_OPERATORS = { 665 **parser.Parser.COLUMN_OPERATORS, 666 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 667 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 668 else self.expression(exp.ScopeResolution, this=this, expression=to), 669 } 670 671 SET_OP_MODIFIERS = {"offset"} 672 673 ODBC_DATETIME_LITERALS = { 674 "d": exp.Date, 675 "t": exp.Time, 676 "ts": exp.Timestamp, 677 } 678 679 def _parse_alter_table_set(self) -> exp.AlterSet: 680 return self._parse_wrapped(super()._parse_alter_table_set) 681 682 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 683 if self._match(TokenType.MERGE): 684 comments = self._prev_comments 685 merge = self._parse_merge() 686 merge.add_comments(comments, 
prepend=True) 687 return merge 688 689 return super()._parse_wrapped_select(table=table) 690 691 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 692 # We want to use _parse_types() if the first token after :: is a known type, 693 # otherwise we could parse something like x::varchar(max) into a function 694 if self._match_set(self.TYPE_TOKENS, advance=False): 695 return self._parse_types() 696 697 return self._parse_function() or self._parse_types() 698 699 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 700 if not self._match(TokenType.OPTION): 701 return None 702 703 def _parse_option() -> t.Optional[exp.Expression]: 704 option = self._parse_var_from_options(OPTIONS) 705 if not option: 706 return None 707 708 self._match(TokenType.EQ) 709 return self.expression( 710 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 711 ) 712 713 return self._parse_wrapped_csv(_parse_option) 714 715 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 716 this = self._parse_primary_or_var() 717 if self._match(TokenType.L_PAREN, advance=False): 718 expression = self._parse_wrapped(self._parse_string) 719 else: 720 expression = None 721 722 return exp.XMLKeyValueOption(this=this, expression=expression) 723 724 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 725 if not self._match_pair(TokenType.FOR, TokenType.XML): 726 return None 727 728 def _parse_for_xml() -> t.Optional[exp.Expression]: 729 return self.expression( 730 exp.QueryOption, 731 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 732 or self._parse_xml_key_value_option(), 733 ) 734 735 return self._parse_csv(_parse_for_xml) 736 737 def _parse_projections(self) -> t.List[exp.Expression]: 738 """ 739 T-SQL supports the syntax alias = expression in the SELECT's projection list, 740 so we transform all parsed Selects to convert their EQ projections into Aliases. 
741 742 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 743 """ 744 return [ 745 ( 746 exp.alias_(projection.expression, projection.this.this, copy=False) 747 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 748 else projection 749 ) 750 for projection in super()._parse_projections() 751 ] 752 753 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 754 """Applies to SQL Server and Azure SQL Database 755 COMMIT [ { TRAN | TRANSACTION } 756 [ transaction_name | @tran_name_variable ] ] 757 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 758 759 ROLLBACK { TRAN | TRANSACTION } 760 [ transaction_name | @tran_name_variable 761 | savepoint_name | @savepoint_variable ] 762 """ 763 rollback = self._prev.token_type == TokenType.ROLLBACK 764 765 self._match_texts(("TRAN", "TRANSACTION")) 766 this = self._parse_id_var() 767 768 if rollback: 769 return self.expression(exp.Rollback, this=this) 770 771 durability = None 772 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 773 self._match_text_seq("DELAYED_DURABILITY") 774 self._match(TokenType.EQ) 775 776 if self._match_text_seq("OFF"): 777 durability = False 778 else: 779 self._match(TokenType.ON) 780 durability = True 781 782 self._match_r_paren() 783 784 return self.expression(exp.Commit, this=this, durability=durability) 785 786 def _parse_transaction(self) -> exp.Transaction | exp.Command: 787 """Applies to SQL Server and Azure SQL Database 788 BEGIN { TRAN | TRANSACTION } 789 [ { transaction_name | @tran_name_variable } 790 [ WITH MARK [ 'description' ] ] 791 ] 792 """ 793 if self._match_texts(("TRAN", "TRANSACTION")): 794 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 795 if self._match_text_seq("WITH", "MARK"): 796 transaction.set("mark", self._parse_string()) 797 798 return transaction 799 800 return self._parse_as_command(self._prev) 801 802 def _parse_returns(self) -> 
exp.ReturnsProperty: 803 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 804 returns = super()._parse_returns() 805 returns.set("table", table) 806 return returns 807 808 def _parse_convert( 809 self, strict: bool, safe: t.Optional[bool] = None 810 ) -> t.Optional[exp.Expression]: 811 this = self._parse_types() 812 self._match(TokenType.COMMA) 813 args = [this, *self._parse_csv(self._parse_assignment)] 814 convert = exp.Convert.from_arg_list(args) 815 convert.set("safe", safe) 816 convert.set("strict", strict) 817 return convert 818 819 def _parse_column_def( 820 self, this: t.Optional[exp.Expression], computed_column: bool = True 821 ) -> t.Optional[exp.Expression]: 822 this = super()._parse_column_def(this=this, computed_column=computed_column) 823 if not this: 824 return None 825 if self._match(TokenType.EQ): 826 this.set("default", self._parse_disjunction()) 827 if self._match_texts(self.COLUMN_DEFINITION_MODES): 828 this.set("output", self._prev.text) 829 return this 830 831 def _parse_user_defined_function( 832 self, kind: t.Optional[TokenType] = None 833 ) -> t.Optional[exp.Expression]: 834 this = super()._parse_user_defined_function(kind=kind) 835 836 if ( 837 kind == TokenType.FUNCTION 838 or isinstance(this, exp.UserDefinedFunction) 839 or self._match(TokenType.ALIAS, advance=False) 840 ): 841 return this 842 843 if not self._match(TokenType.WITH, advance=False): 844 expressions = self._parse_csv(self._parse_function_parameter) 845 else: 846 expressions = None 847 848 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 849 850 def _parse_into(self) -> t.Optional[exp.Into]: 851 into = super()._parse_into() 852 853 table = isinstance(into, exp.Into) and into.find(exp.Table) 854 if isinstance(table, exp.Table): 855 table_identifier = table.this 856 if table_identifier.args.get("temporary"): 857 # Promote the temporary property from the Identifier to the Into expression 858 t.cast(exp.Into, 
into).set("temporary", True) 859 860 return into 861 862 def _parse_id_var( 863 self, 864 any_token: bool = True, 865 tokens: t.Optional[t.Collection[TokenType]] = None, 866 ) -> t.Optional[exp.Expression]: 867 is_temporary = self._match(TokenType.HASH) 868 is_global = is_temporary and self._match(TokenType.HASH) 869 870 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 871 if this: 872 if is_global: 873 this.set("global", True) 874 elif is_temporary: 875 this.set("temporary", True) 876 877 return this 878 879 def _parse_create(self) -> exp.Create | exp.Command: 880 create = super()._parse_create() 881 882 if isinstance(create, exp.Create): 883 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 884 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 885 if not create.args.get("properties"): 886 create.set("properties", exp.Properties(expressions=[])) 887 888 create.args["properties"].append("expressions", exp.TemporaryProperty()) 889 890 return create 891 892 def _parse_if(self) -> t.Optional[exp.Expression]: 893 index = self._index 894 895 if self._match_text_seq("OBJECT_ID"): 896 self._parse_wrapped_csv(self._parse_string) 897 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 898 return self._parse_drop(exists=True) 899 self._retreat(index) 900 901 return super()._parse_if() 902 903 def _parse_unique(self) -> exp.UniqueColumnConstraint: 904 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 905 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 906 else: 907 this = self._parse_schema(self._parse_id_var(any_token=False)) 908 909 return self.expression(exp.UniqueColumnConstraint, this=this) 910 911 def _parse_update(self) -> exp.Update: 912 expression = super()._parse_update() 913 expression.set("options", self._parse_options()) 914 return expression 915 916 def _parse_partition(self) -> t.Optional[exp.Partition]: 917 if not 
self._match_text_seq("WITH", "(", "PARTITIONS"): 918 return None 919 920 def parse_range(): 921 low = self._parse_bitwise() 922 high = self._parse_bitwise() if self._match_text_seq("TO") else None 923 924 return ( 925 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 926 ) 927 928 partition = self.expression( 929 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 930 ) 931 932 self._match_r_paren() 933 934 return partition 935 936 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 937 var = self._parse_id_var() 938 if not var: 939 return None 940 941 self._match(TokenType.ALIAS) 942 return self.expression( 943 exp.DeclareItem, 944 this=var, 945 kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(), 946 default=self._match(TokenType.EQ) and self._parse_bitwise(), 947 ) 948 949 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 950 expression = super()._parse_alter_table_alter() 951 952 if expression is not None: 953 collation = expression.args.get("collate") 954 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 955 identifier = collation.this 956 collation.set("this", exp.Var(this=identifier.name)) 957 958 return expression 959 960 class Generator(generator.Generator): 961 LIMIT_IS_TOP = True 962 QUERY_HINTS = False 963 RETURNING_END = False 964 NVL2_SUPPORTED = False 965 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 966 LIMIT_FETCH = "FETCH" 967 COMPUTED_COLUMN_WITH_TYPE = False 968 CTE_RECURSIVE_KEYWORD_REQUIRED = False 969 ENSURE_BOOLS = True 970 NULL_ORDERING_SUPPORTED = None 971 SUPPORTS_SINGLE_ARG_CONCAT = False 972 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 973 SUPPORTS_SELECT_INTO = True 974 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 975 SUPPORTS_TO_NUMBER = False 976 SET_OP_MODIFIERS = False 977 COPY_PARAMS_EQ_REQUIRED = True 978 PARSE_JSON_NAME = None 979 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 980 ALTER_SET_WRAPPED = True 981 
        ALTER_SET_TYPE = ""

        # Expression types under which nested CTEs are not allowed in T-SQL;
        # nested WITH clauses get bubbled up before generation.
        EXPRESSIONS_WITHOUT_NESTED_CTES = {
            exp.Create,
            exp.Delete,
            exp.Insert,
            exp.Intersect,
            exp.Except,
            exp.Merge,
            exp.Select,
            exp.Subquery,
            exp.Union,
            exp.Update,
        }

        # JSON path parts that can be expressed in T-SQL's JSON path syntax.
        SUPPORTED_JSON_PATH_PARTS = {
            exp.JSONPathKey,
            exp.JSONPathRoot,
            exp.JSONPathSubscript,
        }

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.BOOLEAN: "BIT",
            exp.DataType.Type.DATETIME2: "DATETIME2",
            exp.DataType.Type.DECIMAL: "NUMERIC",
            exp.DataType.Type.DOUBLE: "FLOAT",
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.ROWVERSION: "ROWVERSION",
            exp.DataType.Type.TEXT: "VARCHAR(MAX)",
            exp.DataType.Type.TIMESTAMP: "DATETIME2",
            exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
            exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
            exp.DataType.Type.UTINYINT: "TINYINT",
            exp.DataType.Type.VARIANT: "SQL_VARIANT",
            exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
        }

        # NCHAR/NVARCHAR are valid T-SQL type names, so don't remap them.
        TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
        TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.AnyValue: any_value_to_max_sql,
            exp.ArrayToString: rename_func("STRING_AGG"),
            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
            exp.Chr: rename_func("CHAR"),
            exp.DateAdd: date_delta_sql("DATEADD"),
            exp.DateDiff: date_delta_sql("DATEDIFF"),
            exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
            exp.CurrentDate: rename_func("GETDATE"),
            exp.CurrentTimestamp: rename_func("GETDATE"),
            exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.Extract: rename_func("DATEPART"),
            exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
            exp.GroupConcat: _string_agg_sql,
            exp.If: rename_func("IIF"),
            exp.JSONExtract: _json_extract_sql,
            exp.JSONExtractScalar: _json_extract_sql,
            exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
            exp.Ln: rename_func("LOG"),
            exp.Max: max_or_greatest,
            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
            exp.Min: min_or_least,
            exp.NumberToStr: _format_sql,
            exp.Repeat: rename_func("REPLICATE"),
            exp.CurrentSchema: rename_func("SCHEMA_NAME"),
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_distinct_on,
                    transforms.eliminate_semi_and_anti_joins,
                    transforms.eliminate_qualify,
                    transforms.unnest_generate_date_array_using_recursive_cte,
                ]
            ),
            exp.Stddev: rename_func("STDEV"),
            exp.StrPosition: lambda self, e: strposition_sql(
                self, e, func_name="CHARINDEX", supports_position=True
            ),
            exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
            exp.SHA2: lambda self, e: self.func(
                "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
            ),
            exp.TemporaryProperty: lambda self, e: "",
            exp.TimeStrToTime: _timestrtotime_sql,
            exp.TimeToStr: _format_sql,
            exp.Trim: trim_sql,
            exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
            exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
            exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
            exp.Uuid: lambda *_: "NEWID()",
            exp.DateFromParts: rename_func("DATEFROMPARTS"),
        }

        TRANSFORMS.pop(exp.ReturnsProperty)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def scope_resolution(self, rhs: str, scope_name: str) -> str:
            """Render scope resolution as `scope::rhs` (T-SQL's :: operator)."""
            return f"{scope_name}::{rhs}"

        def select_sql(self, expression: exp.Select) -> str:
            """Generate SELECT, rewriting LIMIT/FETCH into T-SQL's ORDER BY + OFFSET/FETCH form."""
            limit = expression.args.get("limit")
            offset = expression.args.get("offset")

            if isinstance(limit, exp.Fetch) and not offset:
                # Dialects like Oracle can FETCH directly from a row set but
                # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
                offset = exp.Offset(expression=exp.Literal.number(0))
                expression.set("offset", offset)

            if offset:
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

                if isinstance(limit, exp.Limit):
                    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                    # we replace here because otherwise TOP would be generated in select_sql
                    limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            """Generate CONVERT, or TRY_CONVERT when the expression is marked safe."""
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            """Render a single OPTION(...) entry; some options need an '=' before the value."""
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            """Map LATERAL joins to CROSS/OUTER APPLY; plain LATERAL is unsupported."""
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"

        def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
            """Transpile SPLIT_PART to PARSENAME (only possible for '.' delimiter, literal
            arguments and at most 4 parts, since PARSENAME counts parts from the right)."""
            this = expression.this
            split_count = len(this.name.split("."))
            delimiter = expression.args.get("delimiter")
            part_index = expression.args.get("part_index")

            if (
                not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
                or (delimiter and delimiter.name != ".")
                or not part_index
                or split_count > 4
            ):
                self.unsupported(
                    "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
                )
                return ""

            return self.func(
                "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
            )

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            """Generate TIMEFROMPARTS, dropping unsupported nanoseconds and defaulting
            fractions/precision to 0 as the function requires all arguments."""
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)

        def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
            """Generate DATETIMEFROMPARTS, dropping unsupported zone/nano args and
            defaulting milliseconds to 0."""
            zone = expression.args.get("zone")
            if zone is not None:
                zone.pop()
                self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

            if expression.args.get("milli") is None:
                expression.set("milli", exp.Literal.number(0))

            return rename_func("DATETIMEFROMPARTS")(self, expression)

        def setitem_sql(self, expression: exp.SetItem) -> str:
            this = expression.this
            if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
                # T-SQL does not use '=' in SET command, except when the LHS is a variable.
                return f"{self.sql(this.left)} {self.sql(this.right)}"

            return super().setitem_sql(expression)

        def boolean_sql(self, expression: exp.Boolean) -> str:
            """T-SQL has no boolean literals: use 1/0 in BIT contexts (comparisons,
            projections, VALUES), otherwise an always-true/false predicate."""
            if type(expression.parent) in BIT_TYPES or isinstance(
                expression.find_ancestor(exp.Values, exp.Select), exp.Values
            ):
                return "1" if expression.this else "0"

            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            # IS TRUE/FALSE has no T-SQL equivalent, so compare against 1/0 instead
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            """Prefix the created object's name with '#' when it's a temporary table."""
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            """Generate CREATE, handling T-SQL specifics: CTAS becomes SELECT ... INTO,
            IF NOT EXISTS becomes an IF NOT EXISTS (...) EXEC('...') guard, and
            CREATE OR REPLACE becomes CREATE OR ALTER."""
            kind = expression.kind
            exists = expression.args.pop("exists", None)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            if kind == "VIEW":
                expression.this.set("catalog", None)
                with_ = expression.args.get("with")
                if ctas_expression and with_:
                    # We've already preprocessed the Create expression to bubble up any nested CTEs,
                    # but CREATE VIEW actually requires the WITH clause to come after it so we need
                    # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                    ctas_expression.set("with", with_.pop())

            table = expression.find(exp.Table)

            # Convert CTAS statement to SELECT .. INTO ..
            if kind == "TABLE" and ctas_expression:
                if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                    ctas_expression = ctas_expression.subquery()

                properties = expression.args.get("properties") or exp.Properties()
                is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

                select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
                select_into.set("into", exp.Into(this=table, temporary=is_temp))

                if like_property:
                    # CREATE ... LIKE copies only the schema, so select zero rows
                    select_into.limit(0, copy=False)

                sql = self.sql(select_into)
            else:
                sql = super().create_sql(expression)

            if exists:
                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
                sql_with_ctes = self.prepend_ctes(expression, sql)
                sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
                if kind == "SCHEMA":
                    return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
                elif kind == "TABLE":
                    assert table
                    where = exp.and_(
                        exp.column("TABLE_NAME").eq(table.name),
                        exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                        exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
                    )
                    return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
                elif kind == "INDEX":
                    index = self.sql(exp.Literal.string(expression.this.text("this")))
                    return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
            elif expression.args.get("replace"):
                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

            return self.prepend_ctes(expression, sql)

        @generator.unsupported_args("unlogged", "expressions")
        def into_sql(self, expression: exp.Into) -> str:
            if expression.args.get("temporary"):
                # If the Into expression has a temporary property, push this down to the Identifier
                table = expression.find(exp.Table)
                if table and isinstance(table.this, exp.Identifier):
                    table.this.set("temporary", True)

            return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

        def count_sql(self, expression: exp.Count) -> str:
            # COUNT_BIG returns BIGINT, plain COUNT returns INT
            func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
            return rename_func(func_name)(self, expression)

        def offset_sql(self, expression: exp.Offset) -> str:
            # T-SQL requires the ROWS keyword after OFFSET <n>
            return f"{super().offset_sql(expression)} ROWS"

        def version_sql(self, expression: exp.Version) -> str:
            """Render temporal-table queries: FOR SYSTEM_TIME FROM/BETWEEN/AS OF ..."""
            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
            this = f"FOR {name}"
            expr = expression.expression
            kind = expression.text("kind")
            if kind in ("FROM", "BETWEEN"):
                args = expr.expressions
                sep = "TO" if kind == "FROM" else "AND"
                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
            else:
                expr_sql = self.sql(expr)

            expr_sql = f" {expr_sql}" if expr_sql else ""
            return f"{this} {kind}{expr_sql}"

        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
            # Table-valued functions include the table variable name before the type
            table = expression.args.get("table")
            table = f"{table} " if table else ""
            return f"RETURNS {table}{self.sql(expression, 'this')}"

        def returning_sql(self, expression: exp.Returning) -> str:
            # RETURNING maps to T-SQL's OUTPUT [... INTO target] clause
            into = self.sql(expression, "into")
            into = self.seg(f"INTO {into}") if into else ""
            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

        def transaction_sql(self, expression: exp.Transaction) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            mark = self.sql(expression, "mark")
            mark = f" WITH MARK {mark}" if mark else ""
            return f"BEGIN TRANSACTION{this}{mark}"

        def commit_sql(self, expression: exp.Commit) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            durability = expression.args.get("durability")
            durability = (
                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
                if durability is not None
                else ""
            )
            return f"COMMIT TRANSACTION{this}{durability}"

        def rollback_sql(self, expression: exp.Rollback) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            return f"ROLLBACK TRANSACTION{this}"

        def identifier_sql(self, expression: exp.Identifier) -> str:
            # Global temp objects are prefixed with ##, session-local ones with #
            identifier = super().identifier_sql(expression)

            if expression.args.get("global"):
                identifier = f"##{identifier}"
            elif expression.args.get("temporary"):
                identifier = f"#{identifier}"

            return identifier

        def constraint_sql(self, expression: exp.Constraint) -> str:
            this = self.sql(expression, "this")
            expressions = self.expressions(expression, flat=True, sep=" ")
            return f"CONSTRAINT {this} {expressions}"

        def length_sql(self, expression: exp.Length) -> str:
            return self._uncast_text(expression, "LEN")

        def right_sql(self, expression: exp.Right) -> str:
            return self._uncast_text(expression, "RIGHT")

        def left_sql(self, expression: exp.Left) -> str:
            return self._uncast_text(expression, "LEFT")

        def _uncast_text(self, expression: exp.Expression, name: str) -> str:
            # These string functions accept VARCHAR(MAX) directly, so the roundtrip
            # CAST(x AS TEXT) added when parsing can be dropped on generation.
            this = expression.this
            if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
                this_sql = self.sql(this, "this")
            else:
                this_sql = self.sql(this)
            expression_sql = self.sql(expression, "expression")
            return self.func(name, this_sql, expression_sql if expression_sql else None)

        def partition_sql(self, expression: exp.Partition) -> str:
            return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

        def alter_sql(self, expression: exp.Alter) -> str:
            # ALTER ... RENAME has no direct syntax; use the sp_rename stored procedure
            action = seq_get(expression.args.get("actions") or [], 0)
            if isinstance(action, exp.AlterRename):
                return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
            return super().alter_sql(expression)

        def drop_sql(self, expression: exp.Drop) -> str:
            # DROP VIEW may not be qualified with a catalog (database) name
            if expression.args["kind"] == "VIEW":
                expression.this.set("catalog", None)
            return super().drop_sql(expression)

        def options_modifier(self, expression: exp.Expression) -> str:
            options = self.expressions(expression, key="options")
            return f" OPTION{self.wrap(options)}" if options else ""

        def dpipe_sql(self, expression: exp.DPipe) -> str:
            # T-SQL concatenates strings with '+', so fold the || chain into Adds
            return self.sql(
                reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
            )

        def isascii_sql(self, expression: exp.IsAscii) -> str:
            # PATINDEX over the pattern '%[^\x00-\x7f]%' (hex-encoded to survive collation)
            return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

        def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
            # Append procedure-parameter extras: '= default' and OUT/OUTPUT/READONLY mode
            this = super().columndef_sql(expression, sep)
            default = self.sql(expression, "default")
            default = f" = {default}" if default else ""
            output = self.sql(expression, "output")
            output = f" {output}" if output else ""
            return f"{this}{default}{output}"

        def coalesce_sql(self, expression: exp.Coalesce) -> str:
            # Round-trip ISNULL as ISNULL (flagged at parse time), else COALESCE
            func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
            return rename_func(func_name)(self, expression)
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensure every unnamed output column of a CTE or Subquery gets an explicit alias."""
    table_alias = expression.args.get("alias")

    needs_qualification = (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(table_alias, exp.TableAlias)
        and not table_alias.columns
    )
    if not needs_qualification:
        return expression

    from sqlglot.optimizer.qualify_columns import qualify_outputs

    query = expression.this

    # Record the *positions* of the bare column projections rather than the nodes
    # themselves: qualify_outputs replaces those nodes with fresh Alias parents,
    # so positions are the only stable handle we have afterwards.
    bare_positions = [
        idx
        for idx, projection in enumerate(query.selects)
        if isinstance(projection, exp.Column) and not projection.alias
    ]

    qualify_outputs(query)

    # Carry each original column's quoting over to its newly created Alias node
    selects_after = query.selects
    for idx in bare_positions:
        alias_node = selects_after[idx]
        column = alias_node.this
        if isinstance(column.this, exp.Identifier):
            alias_node.args["alias"].set("quoted", column.this.quoted)

    return expression
Ensures all unnamed output columns are aliased for CTEs and subqueries.
406class TSQL(Dialect): 407 SUPPORTS_SEMI_ANTI_JOIN = False 408 LOG_BASE_FIRST = False 409 TYPED_DIVISION = True 410 CONCAT_COALESCE = True 411 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 412 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 413 414 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 415 416 TIME_MAPPING = { 417 "year": "%Y", 418 "dayofyear": "%j", 419 "day": "%d", 420 "dy": "%d", 421 "y": "%Y", 422 "week": "%W", 423 "ww": "%W", 424 "wk": "%W", 425 "hour": "%h", 426 "hh": "%I", 427 "minute": "%M", 428 "mi": "%M", 429 "n": "%M", 430 "second": "%S", 431 "ss": "%S", 432 "s": "%-S", 433 "millisecond": "%f", 434 "ms": "%f", 435 "weekday": "%w", 436 "dw": "%w", 437 "month": "%m", 438 "mm": "%M", 439 "m": "%-M", 440 "Y": "%Y", 441 "YYYY": "%Y", 442 "YY": "%y", 443 "MMMM": "%B", 444 "MMM": "%b", 445 "MM": "%m", 446 "M": "%-m", 447 "dddd": "%A", 448 "dd": "%d", 449 "d": "%-d", 450 "HH": "%H", 451 "H": "%-H", 452 "h": "%-I", 453 "ffffff": "%f", 454 "yyyy": "%Y", 455 "yy": "%y", 456 } 457 458 CONVERT_FORMAT_MAPPING = { 459 "0": "%b %d %Y %-I:%M%p", 460 "1": "%m/%d/%y", 461 "2": "%y.%m.%d", 462 "3": "%d/%m/%y", 463 "4": "%d.%m.%y", 464 "5": "%d-%m-%y", 465 "6": "%d %b %y", 466 "7": "%b %d, %y", 467 "8": "%H:%M:%S", 468 "9": "%b %d %Y %-I:%M:%S:%f%p", 469 "10": "mm-dd-yy", 470 "11": "yy/mm/dd", 471 "12": "yymmdd", 472 "13": "%d %b %Y %H:%M:ss:%f", 473 "14": "%H:%M:%S:%f", 474 "20": "%Y-%m-%d %H:%M:%S", 475 "21": "%Y-%m-%d %H:%M:%S.%f", 476 "22": "%m/%d/%y %-I:%M:%S %p", 477 "23": "%Y-%m-%d", 478 "24": "%H:%M:%S", 479 "25": "%Y-%m-%d %H:%M:%S.%f", 480 "100": "%b %d %Y %-I:%M%p", 481 "101": "%m/%d/%Y", 482 "102": "%Y.%m.%d", 483 "103": "%d/%m/%Y", 484 "104": "%d.%m.%Y", 485 "105": "%d-%m-%Y", 486 "106": "%d %b %Y", 487 "107": "%b %d, %Y", 488 "108": "%H:%M:%S", 489 "109": "%b %d %Y %-I:%M:%S:%f%p", 490 "110": "%m-%d-%Y", 491 "111": "%Y/%m/%d", 492 "112": "%Y%m%d", 493 "113": "%d %b %Y %H:%M:%S:%f", 494 "114": "%H:%M:%S:%f", 495 "120": "%Y-%m-%d %H:%M:%S", 
496 "121": "%Y-%m-%d %H:%M:%S.%f", 497 "126": "%Y-%m-%dT%H:%M:%S.%f", 498 } 499 500 FORMAT_TIME_MAPPING = { 501 "y": "%B %Y", 502 "d": "%m/%d/%Y", 503 "H": "%-H", 504 "h": "%-I", 505 "s": "%Y-%m-%d %H:%M:%S", 506 "D": "%A,%B,%Y", 507 "f": "%A,%B,%Y %-I:%M %p", 508 "F": "%A,%B,%Y %-I:%M:%S %p", 509 "g": "%m/%d/%Y %-I:%M %p", 510 "G": "%m/%d/%Y %-I:%M:%S %p", 511 "M": "%B %-d", 512 "m": "%B %-d", 513 "O": "%Y-%m-%dT%H:%M:%S", 514 "u": "%Y-%M-%D %H:%M:%S%z", 515 "U": "%A, %B %D, %Y %H:%M:%S%z", 516 "T": "%-I:%M:%S %p", 517 "t": "%-I:%M", 518 "Y": "%a %Y", 519 } 520 521 class Tokenizer(tokens.Tokenizer): 522 IDENTIFIERS = [("[", "]"), '"'] 523 QUOTES = ["'", '"'] 524 HEX_STRINGS = [("0x", ""), ("0X", "")] 525 VAR_SINGLE_TOKENS = {"@", "$", "#"} 526 527 KEYWORDS = { 528 **tokens.Tokenizer.KEYWORDS, 529 "CLUSTERED INDEX": TokenType.INDEX, 530 "DATETIME2": TokenType.DATETIME2, 531 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 532 "DECLARE": TokenType.DECLARE, 533 "EXEC": TokenType.COMMAND, 534 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 535 "GO": TokenType.COMMAND, 536 "IMAGE": TokenType.IMAGE, 537 "MONEY": TokenType.MONEY, 538 "NONCLUSTERED INDEX": TokenType.INDEX, 539 "NTEXT": TokenType.TEXT, 540 "OPTION": TokenType.OPTION, 541 "OUTPUT": TokenType.RETURNING, 542 "PRINT": TokenType.COMMAND, 543 "PROC": TokenType.PROCEDURE, 544 "REAL": TokenType.FLOAT, 545 "ROWVERSION": TokenType.ROWVERSION, 546 "SMALLDATETIME": TokenType.SMALLDATETIME, 547 "SMALLMONEY": TokenType.SMALLMONEY, 548 "SQL_VARIANT": TokenType.VARIANT, 549 "SYSTEM_USER": TokenType.CURRENT_USER, 550 "TOP": TokenType.TOP, 551 "TIMESTAMP": TokenType.ROWVERSION, 552 "TINYINT": TokenType.UTINYINT, 553 "UNIQUEIDENTIFIER": TokenType.UUID, 554 "UPDATE STATISTICS": TokenType.COMMAND, 555 "XML": TokenType.XML, 556 } 557 KEYWORDS.pop("/*+") 558 559 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 560 561 class Parser(parser.Parser): 562 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 563 LOG_DEFAULTS_TO_LN = True 564 
STRING_ALIASES = True 565 NO_PAREN_IF_COMMANDS = False 566 567 QUERY_MODIFIER_PARSERS = { 568 **parser.Parser.QUERY_MODIFIER_PARSERS, 569 TokenType.OPTION: lambda self: ("options", self._parse_options()), 570 TokenType.FOR: lambda self: ("for", self._parse_for()), 571 } 572 573 # T-SQL does not allow BEGIN to be used as an identifier 574 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 575 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 576 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 577 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 578 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 579 580 FUNCTIONS = { 581 **parser.Parser.FUNCTIONS, 582 "CHARINDEX": lambda args: exp.StrPosition( 583 this=seq_get(args, 1), 584 substr=seq_get(args, 0), 585 position=seq_get(args, 2), 586 ), 587 "COUNT": lambda args: exp.Count( 588 this=seq_get(args, 0), expressions=args[1:], big_int=False 589 ), 590 "COUNT_BIG": lambda args: exp.Count( 591 this=seq_get(args, 0), expressions=args[1:], big_int=True 592 ), 593 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 594 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 595 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 596 "DATEPART": _build_formatted_time(exp.TimeToStr), 597 "DATETIMEFROMPARTS": _build_datetimefromparts, 598 "EOMONTH": _build_eomonth, 599 "FORMAT": _build_format, 600 "GETDATE": exp.CurrentTimestamp.from_arg_list, 601 "HASHBYTES": _build_hashbytes, 602 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 603 "JSON_QUERY": _build_json_query, 604 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 605 "LEN": _build_with_arg_as_text(exp.Length), 606 "LEFT": _build_with_arg_as_text(exp.Left), 607 "NEWID": exp.Uuid.from_arg_list, 608 "RIGHT": _build_with_arg_as_text(exp.Right), 609 
"PARSENAME": _build_parsename, 610 "REPLICATE": exp.Repeat.from_arg_list, 611 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 612 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 613 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 614 "SUSER_NAME": exp.CurrentUser.from_arg_list, 615 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 616 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 617 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 618 "TIMEFROMPARTS": _build_timefromparts, 619 "DATETRUNC": _build_datetrunc, 620 } 621 622 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 623 624 PROCEDURE_OPTIONS = dict.fromkeys( 625 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 626 ) 627 628 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 629 630 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 631 TokenType.TABLE, 632 *parser.Parser.TYPE_TOKENS, 633 } 634 635 STATEMENT_PARSERS = { 636 **parser.Parser.STATEMENT_PARSERS, 637 TokenType.DECLARE: lambda self: self._parse_declare(), 638 } 639 640 RANGE_PARSERS = { 641 **parser.Parser.RANGE_PARSERS, 642 TokenType.DCOLON: lambda self, this: self.expression( 643 exp.ScopeResolution, 644 this=this, 645 expression=self._parse_function() or self._parse_var(any_token=True), 646 ), 647 } 648 649 NO_PAREN_FUNCTION_PARSERS = { 650 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 651 "NEXT": lambda self: self._parse_next_value_for(), 652 } 653 654 FUNCTION_PARSERS: t.Dict[str, t.Callable] = { 655 **parser.Parser.FUNCTION_PARSERS, 656 "JSON_ARRAYAGG": lambda self: self.expression( 657 exp.JSONArrayAgg, 658 this=self._parse_bitwise(), 659 order=self._parse_order(), 660 null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"), 661 ), 662 } 663 664 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 665 COLUMN_OPERATORS = { 666 **parser.Parser.COLUMN_OPERATORS, 667 TokenType.DCOLON: lambda self, this, 
to: self.expression(exp.Cast, this=this, to=to) 668 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 669 else self.expression(exp.ScopeResolution, this=this, expression=to), 670 } 671 672 SET_OP_MODIFIERS = {"offset"} 673 674 ODBC_DATETIME_LITERALS = { 675 "d": exp.Date, 676 "t": exp.Time, 677 "ts": exp.Timestamp, 678 } 679 680 def _parse_alter_table_set(self) -> exp.AlterSet: 681 return self._parse_wrapped(super()._parse_alter_table_set) 682 683 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 684 if self._match(TokenType.MERGE): 685 comments = self._prev_comments 686 merge = self._parse_merge() 687 merge.add_comments(comments, prepend=True) 688 return merge 689 690 return super()._parse_wrapped_select(table=table) 691 692 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 693 # We want to use _parse_types() if the first token after :: is a known type, 694 # otherwise we could parse something like x::varchar(max) into a function 695 if self._match_set(self.TYPE_TOKENS, advance=False): 696 return self._parse_types() 697 698 return self._parse_function() or self._parse_types() 699 700 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 701 if not self._match(TokenType.OPTION): 702 return None 703 704 def _parse_option() -> t.Optional[exp.Expression]: 705 option = self._parse_var_from_options(OPTIONS) 706 if not option: 707 return None 708 709 self._match(TokenType.EQ) 710 return self.expression( 711 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 712 ) 713 714 return self._parse_wrapped_csv(_parse_option) 715 716 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 717 this = self._parse_primary_or_var() 718 if self._match(TokenType.L_PAREN, advance=False): 719 expression = self._parse_wrapped(self._parse_string) 720 else: 721 expression = None 722 723 return exp.XMLKeyValueOption(this=this, expression=expression) 724 725 def _parse_for(self) -> 
t.Optional[t.List[exp.Expression]]: 726 if not self._match_pair(TokenType.FOR, TokenType.XML): 727 return None 728 729 def _parse_for_xml() -> t.Optional[exp.Expression]: 730 return self.expression( 731 exp.QueryOption, 732 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 733 or self._parse_xml_key_value_option(), 734 ) 735 736 return self._parse_csv(_parse_for_xml) 737 738 def _parse_projections(self) -> t.List[exp.Expression]: 739 """ 740 T-SQL supports the syntax alias = expression in the SELECT's projection list, 741 so we transform all parsed Selects to convert their EQ projections into Aliases. 742 743 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 744 """ 745 return [ 746 ( 747 exp.alias_(projection.expression, projection.this.this, copy=False) 748 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 749 else projection 750 ) 751 for projection in super()._parse_projections() 752 ] 753 754 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 755 """Applies to SQL Server and Azure SQL Database 756 COMMIT [ { TRAN | TRANSACTION } 757 [ transaction_name | @tran_name_variable ] ] 758 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 759 760 ROLLBACK { TRAN | TRANSACTION } 761 [ transaction_name | @tran_name_variable 762 | savepoint_name | @savepoint_variable ] 763 """ 764 rollback = self._prev.token_type == TokenType.ROLLBACK 765 766 self._match_texts(("TRAN", "TRANSACTION")) 767 this = self._parse_id_var() 768 769 if rollback: 770 return self.expression(exp.Rollback, this=this) 771 772 durability = None 773 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 774 self._match_text_seq("DELAYED_DURABILITY") 775 self._match(TokenType.EQ) 776 777 if self._match_text_seq("OFF"): 778 durability = False 779 else: 780 self._match(TokenType.ON) 781 durability = True 782 783 self._match_r_paren() 784 785 return self.expression(exp.Commit, 
this=this, durability=durability)

    def _parse_transaction(self) -> exp.Transaction | exp.Command:
        """Applies to SQL Server and Azure SQL Database
        BEGIN { TRAN | TRANSACTION }
        [ { transaction_name | @tran_name_variable }
        [ WITH MARK [ 'description' ] ]
        ]
        """
        if self._match_texts(("TRAN", "TRANSACTION")):
            transaction = self.expression(exp.Transaction, this=self._parse_id_var())
            if self._match_text_seq("WITH", "MARK"):
                transaction.set("mark", self._parse_string())

            return transaction

        # BEGIN not followed by TRAN/TRANSACTION: fall back to an opaque command
        return self._parse_as_command(self._prev)

    def _parse_returns(self) -> exp.ReturnsProperty:
        """Parse RETURNS, capturing the optional table-variable name used by
        T-SQL table-valued functions (RETURNS @t TABLE (...))."""
        table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
        returns = super()._parse_returns()
        returns.set("table", table)
        return returns

    def _parse_convert(
        self, strict: bool, safe: t.Optional[bool] = None
    ) -> t.Optional[exp.Expression]:
        """Parse CONVERT/TRY_CONVERT(data_type, expression [, style])."""
        this = self._parse_types()
        self._match(TokenType.COMMA)
        args = [this, *self._parse_csv(self._parse_assignment)]
        convert = exp.Convert.from_arg_list(args)
        convert.set("safe", safe)
        convert.set("strict", strict)
        return convert

    def _parse_column_def(
        self, this: t.Optional[exp.Expression], computed_column: bool = True
    ) -> t.Optional[exp.Expression]:
        """Extend column/parameter parsing with `= <default>` values and the
        OUT/OUTPUT/READONLY modes seen in procedure parameter lists."""
        this = super()._parse_column_def(this=this, computed_column=computed_column)
        if not this:
            return None
        if self._match(TokenType.EQ):
            this.set("default", self._parse_disjunction())
        if self._match_texts(self.COLUMN_DEFINITION_MODES):
            this.set("output", self._prev.text)
        return this

    def _parse_user_defined_function(
        self, kind: t.Optional[TokenType] = None
    ) -> t.Optional[exp.Expression]:
        """Parse UDF/procedure signatures; T-SQL procedures may declare their
        parameters without surrounding parentheses."""
        this = super()._parse_user_defined_function(kind=kind)

        if (
            kind == TokenType.FUNCTION
            or isinstance(this, exp.UserDefinedFunction)
            or self._match(TokenType.ALIAS, advance=False)
        ):
            return this

        # Parenthesis-less parameter list, unless a WITH options clause follows
        if not self._match(TokenType.WITH, advance=False):
            expressions = self._parse_csv(self._parse_function_parameter)
        else:
            expressions = None

        return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

    def _parse_into(self) -> t.Optional[exp.Into]:
        into = super()._parse_into()

        table = isinstance(into, exp.Into) and into.find(exp.Table)
        if isinstance(table, exp.Table):
            table_identifier = table.this
            if table_identifier.args.get("temporary"):
                # Promote the temporary property from the Identifier to the Into expression
                t.cast(exp.Into, into).set("temporary", True)

        return into

    def _parse_id_var(
        self,
        any_token: bool = True,
        tokens: t.Optional[t.Collection[TokenType]] = None,
    ) -> t.Optional[exp.Expression]:
        """Recognize the #local and ##global temp-object name prefixes and
        record them as flags on the parsed identifier."""
        is_temporary = self._match(TokenType.HASH)
        is_global = is_temporary and self._match(TokenType.HASH)

        this = super()._parse_id_var(any_token=any_token, tokens=tokens)
        if this:
            if is_global:
                this.set("global", True)
            elif is_temporary:
                this.set("temporary", True)

        return this

    def _parse_create(self) -> exp.Create | exp.Command:
        create = super()._parse_create()

        if isinstance(create, exp.Create):
            table = create.this.this if isinstance(create.this, exp.Schema) else create.this
            if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
                # A #name target table is temporary: surface that as a TemporaryProperty
                if not create.args.get("properties"):
                    create.set("properties", exp.Properties(expressions=[]))

                create.args["properties"].append("expressions", exp.TemporaryProperty())

        return create

    def _parse_if(self) -> t.Optional[exp.Expression]:
        """Translate the `IF OBJECT_ID('...') IS NOT NULL DROP ...` idiom into
        DROP ... IF EXISTS; otherwise parse a regular IF."""
        index = self._index

        if self._match_text_seq("OBJECT_ID"):
            self._parse_wrapped_csv(self._parse_string)
            if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                return self._parse_drop(exists=True)
            self._retreat(index)

        return super()._parse_if()

    def _parse_unique(self) -> exp.UniqueColumnConstraint:
        # UNIQUE may be followed by CLUSTERED/NONCLUSTERED index options
        if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
            this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
        else:
            this = self._parse_schema(self._parse_id_var(any_token=False))

        return self.expression(exp.UniqueColumnConstraint, this=this)

    def _parse_update(self) -> exp.Update:
        # UPDATE statements may carry a trailing OPTION (...) query-hint clause
        expression = super()._parse_update()
        expression.set("options", self._parse_options())
        return expression

    def _parse_partition(self) -> t.Optional[exp.Partition]:
        """Parse WITH (PARTITIONS (<n> [TO <m>], ...))."""
        if not self._match_text_seq("WITH", "(", "PARTITIONS"):
            return None

        def parse_range():
            # Either a single partition number or a `low TO high` range
            low = self._parse_bitwise()
            high = self._parse_bitwise() if self._match_text_seq("TO") else None

            return (
                self.expression(exp.PartitionRange, this=low, expression=high) if high else low
            )

        partition = self.expression(
            exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
        )

        self._match_r_paren()

        return partition

    def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
        """Parse one DECLARE item: @var [AS] <type | TABLE schema> [= default]."""
        var = self._parse_id_var()
        if not var:
            return None

        self._match(TokenType.ALIAS)
        return self.expression(
            exp.DeclareItem,
            this=var,
            kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(),
            default=self._match(TokenType.EQ) and self._parse_bitwise(),
        )

    def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]:
        expression = super()._parse_alter_table_alter()

        if expression is not None:
            # COLLATE names are bare keywords, not columns: re-wrap the parsed
            # Identifier as a Var so it isn't quoted like a column reference
            collation = expression.args.get("collate")
            if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier):
                identifier = collation.this
                collation.set("this", exp.Var(this=identifier.name))

        return expression

class Generator(generator.Generator):
    # LIMIT is rendered as SELECT TOP n
    LIMIT_IS_TOP = True
    QUERY_HINTS = False
    RETURNING_END = False
# TSQL.Generator (continued): dialect feature flags and rewrite tables
    NVL2_SUPPORTED = False
    ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
    LIMIT_FETCH = "FETCH"
    COMPUTED_COLUMN_WITH_TYPE = False
    CTE_RECURSIVE_KEYWORD_REQUIRED = False
    ENSURE_BOOLS = True
    NULL_ORDERING_SUPPORTED = None
    SUPPORTS_SINGLE_ARG_CONCAT = False
    TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
    SUPPORTS_SELECT_INTO = True
    JSON_PATH_BRACKETED_KEY_SUPPORTED = False
    SUPPORTS_TO_NUMBER = False
    SET_OP_MODIFIERS = False
    COPY_PARAMS_EQ_REQUIRED = True
    PARSE_JSON_NAME = None
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
    ALTER_SET_WRAPPED = True
    ALTER_SET_TYPE = ""

    # Expression types whose nested CTEs must be bubbled up before generation
    EXPRESSIONS_WITHOUT_NESTED_CTES = {
        exp.Create,
        exp.Delete,
        exp.Insert,
        exp.Intersect,
        exp.Except,
        exp.Merge,
        exp.Select,
        exp.Subquery,
        exp.Union,
        exp.Update,
    }

    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    # Generic sqlglot types -> their T-SQL spellings
    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BOOLEAN: "BIT",
        exp.DataType.Type.DATETIME2: "DATETIME2",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DOUBLE: "FLOAT",
        exp.DataType.Type.INT: "INTEGER",
        exp.DataType.Type.ROWVERSION: "ROWVERSION",
        exp.DataType.Type.TEXT: "VARCHAR(MAX)",
        exp.DataType.Type.TIMESTAMP: "DATETIME2",
        exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
        exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
        exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
        exp.DataType.Type.UTINYINT: "TINYINT",
        exp.DataType.Type.VARIANT: "SQL_VARIANT",
        exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
    }

    # NCHAR/NVARCHAR are native T-SQL types: keep the base-class rendering
    TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
    TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.AnyValue: any_value_to_max_sql,
        exp.ArrayToString: rename_func("STRING_AGG"),
        exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
        exp.Chr: rename_func("CHAR"),
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.DateDiff: date_delta_sql("DATEDIFF"),
        exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
        exp.CurrentDate: rename_func("GETDATE"),
        exp.CurrentTimestamp: rename_func("GETDATE"),
        exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.Extract: rename_func("DATEPART"),
        exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
        exp.GroupConcat: _string_agg_sql,
        exp.If: rename_func("IIF"),
        exp.JSONExtract: _json_extract_sql,
        exp.JSONExtractScalar: _json_extract_sql,
        exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
        exp.Ln: rename_func("LOG"),
        exp.Max: max_or_greatest,
        exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
        exp.Min: min_or_least,
        exp.NumberToStr: _format_sql,
        exp.Repeat: rename_func("REPLICATE"),
        exp.CurrentSchema: rename_func("SCHEMA_NAME"),
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_distinct_on,
                transforms.eliminate_semi_and_anti_joins,
                transforms.eliminate_qualify,
                transforms.unnest_generate_date_array_using_recursive_cte,
            ]
        ),
        exp.Stddev: rename_func("STDEV"),
        exp.StrPosition: lambda self, e: strposition_sql(
            self, e, func_name="CHARINDEX", supports_position=True
        ),
        exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
        exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
        exp.SHA2: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
        ),
        exp.TemporaryProperty: lambda self, e: "",
        exp.TimeStrToTime: _timestrtotime_sql,
        exp.TimeToStr: _format_sql,
        exp.Trim: trim_sql,
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
        exp.Uuid: lambda *_: "NEWID()",
        exp.DateFromParts: rename_func("DATEFROMPARTS"),
    }

    # RETURNS is rendered by returnsproperty_sql below instead
    TRANSFORMS.pop(exp.ReturnsProperty)

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    def scope_resolution(self, rhs: str, scope_name: str) -> str:
        # e.g. scope::member
        return f"{scope_name}::{rhs}"

    def select_sql(self, expression: exp.Select) -> str:
        limit = expression.args.get("limit")
        offset = expression.args.get("offset")

        if isinstance(limit, exp.Fetch) and not offset:
            # Dialects like Oracle can FETCH directly from a row set but
            # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
            offset = exp.Offset(expression=exp.Literal.number(0))
            expression.set("offset", offset)

        if offset:
            if not expression.args.get("order"):
                # ORDER BY is required in order to use OFFSET in a query, so we use
                # a noop order by, since we don't really care about the order.
# TSQL.Generator.select_sql (continued)
                # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                # we replace here because otherwise TOP would be generated in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

        return super().select_sql(expression)

    def convert_sql(self, expression: exp.Convert) -> str:
        # TRY_CONVERT returns NULL on conversion failure instead of erroring
        name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
        return self.func(
            name, expression.this, expression.expression, expression.args.get("style")
        )

    def queryoption_sql(self, expression: exp.QueryOption) -> str:
        option = self.sql(expression, "this")
        value = self.sql(expression, "expression")
        if value:
            # Some options (e.g. MAX_GRANT_PERCENT) require an '=' before the value
            optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
            return f"{option} {optional_equal_sign}{value}"
        return option

    def lateral_op(self, expression: exp.Lateral) -> str:
        cross_apply = expression.args.get("cross_apply")
        if cross_apply is True:
            return "CROSS APPLY"
        if cross_apply is False:
            return "OUTER APPLY"

        # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
        self.unsupported("LATERAL clause is not supported.")
        return "LATERAL"

    def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
        """Transpile SPLIT_PART to PARSENAME, which only handles '.'-separated
        literals of at most four parts and counts parts from the right."""
        this = expression.this
        split_count = len(this.name.split("."))
        delimiter = expression.args.get("delimiter")
        part_index = expression.args.get("part_index")

        if (
            not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
            or (delimiter and delimiter.name != ".")
            or not part_index
            or split_count > 4
        ):
            self.unsupported(
                "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
            )
            return ""

        # PARSENAME indexes from the right, so invert the requested part index
        return self.func(
            "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
        )

    def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

        # TIMEFROMPARTS requires all arguments: default the optional ones to 0
        if expression.args.get("fractions") is None:
            expression.set("fractions", exp.Literal.number(0))
        if expression.args.get("precision") is None:
            expression.set("precision", exp.Literal.number(0))

        return rename_func("TIMEFROMPARTS")(self, expression)

    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
        zone = expression.args.get("zone")
        if zone is not None:
            zone.pop()
            self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

        if expression.args.get("milli") is None:
            expression.set("milli", exp.Literal.number(0))

        return rename_func("DATETIMEFROMPARTS")(self, expression)

    def setitem_sql(self, expression: exp.SetItem) -> str:
        this = expression.this
        if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
            # T-SQL does not use '=' in SET command, except when the LHS is a variable.
            return f"{self.sql(this.left)} {self.sql(this.right)}"

        return super().setitem_sql(expression)

    def boolean_sql(self, expression: exp.Boolean) -> str:
        # T-SQL has no boolean literals: emit BIT 1/0 in value positions and
        # tautological comparisons in predicate positions
        if type(expression.parent) in BIT_TYPES or isinstance(
            expression.find_ancestor(exp.Values, exp.Select), exp.Values
        ):
            return "1" if expression.this else "0"

        return "(1 = 1)" if expression.this else "(1 = 0)"

    def is_sql(self, expression: exp.Is) -> str:
        # `IS TRUE/FALSE` is invalid T-SQL; compare against the bit value instead
        if isinstance(expression.expression, exp.Boolean):
            return self.binary(expression, "=")
        return self.binary(expression, "IS")

    def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
        sql = self.sql(expression, "this")
        properties = expression.args.get("properties")

        # Prefix temporary tables with '#' when the name doesn't carry it already
        if sql[:1] != "#" and any(
            isinstance(prop, exp.TemporaryProperty)
            for prop in (properties.expressions if properties else [])
        ):
            sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

        return sql

    def create_sql(self, expression: exp.Create) -> str:
        """Generate CREATE statements, rewriting CTAS into SELECT ... INTO and
        emulating IF NOT EXISTS via EXEC of the statement text."""
        kind = expression.kind
        exists = expression.args.pop("exists", None)

        like_property = expression.find(exp.LikeProperty)
        if like_property:
            ctas_expression = like_property.this
        else:
            ctas_expression = expression.expression

        if kind == "VIEW":
            # CREATE VIEW names cannot be catalog-qualified
            expression.this.set("catalog", None)
            with_ = expression.args.get("with")
            if ctas_expression and with_:
                # We've already preprocessed the Create expression to bubble up any nested CTEs,
                # but CREATE VIEW actually requires the WITH clause to come after it so we need
                # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                ctas_expression.set("with", with_.pop())

        table = expression.find(exp.Table)

        # Convert CTAS statement to SELECT .. INTO ..
        if kind == "TABLE" and ctas_expression:
            if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                ctas_expression = ctas_expression.subquery()

            properties = expression.args.get("properties") or exp.Properties()
            is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

            select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
            select_into.set("into", exp.Into(this=table, temporary=is_temp))

            if like_property:
                # LIKE copies only the schema, so select zero rows
                select_into.limit(0, copy=False)

            sql = self.sql(select_into)
        else:
            sql = super().create_sql(expression)

        if exists:
            identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
            sql_with_ctes = self.prepend_ctes(expression, sql)
            sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
            if kind == "SCHEMA":
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
            elif kind == "TABLE":
                assert table
                where = exp.and_(
                    exp.column("TABLE_NAME").eq(table.name),
                    exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                    exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
                )
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
            elif kind == "INDEX":
                index = self.sql(exp.Literal.string(expression.this.text("this")))
                return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
        elif expression.args.get("replace"):
            # T-SQL spells CREATE OR REPLACE as CREATE OR ALTER
            sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

        return self.prepend_ctes(expression, sql)

    @generator.unsupported_args("unlogged", "expressions")
    def into_sql(self, expression: exp.Into) -> str:
        if expression.args.get("temporary"):
            # If the Into expression
# TSQL.Generator.into_sql (continued)
            # has a temporary property, push this down to the Identifier
            table = expression.find(exp.Table)
            if table and isinstance(table.this, exp.Identifier):
                table.this.set("temporary", True)

        return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

    def count_sql(self, expression: exp.Count) -> str:
        # COUNT_BIG returns bigint; plain COUNT returns int
        func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
        return rename_func(func_name)(self, expression)

    def offset_sql(self, expression: exp.Offset) -> str:
        # T-SQL requires the ROWS keyword after OFFSET <n>
        return f"{super().offset_sql(expression)} ROWS"

    def version_sql(self, expression: exp.Version) -> str:
        """Render temporal-table FOR SYSTEM_TIME clauses."""
        name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
        this = f"FOR {name}"
        expr = expression.expression
        kind = expression.text("kind")
        if kind in ("FROM", "BETWEEN"):
            # FROM a TO b / BETWEEN a AND b use different range separators
            args = expr.expressions
            sep = "TO" if kind == "FROM" else "AND"
            expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
        else:
            expr_sql = self.sql(expr)

        expr_sql = f" {expr_sql}" if expr_sql else ""
        return f"{this} {kind}{expr_sql}"

    def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
        # Includes the optional @table variable captured by _parse_returns
        table = expression.args.get("table")
        table = f"{table} " if table else ""
        return f"RETURNS {table}{self.sql(expression, 'this')}"

    def returning_sql(self, expression: exp.Returning) -> str:
        # RETURNING maps to T-SQL's OUTPUT [INTO ...] clause
        into = self.sql(expression, "into")
        into = self.seg(f"INTO {into}") if into else ""
        return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

    def transaction_sql(self, expression: exp.Transaction) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        mark = self.sql(expression, "mark")
        mark = f" WITH MARK {mark}" if mark else ""
        return f"BEGIN TRANSACTION{this}{mark}"

    def commit_sql(self, expression: exp.Commit) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        durability = expression.args.get("durability")
        durability = (
            f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
            if durability is not None
            else ""
        )
        return f"COMMIT TRANSACTION{this}{durability}"

    def rollback_sql(self, expression: exp.Rollback) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"ROLLBACK TRANSACTION{this}"

    def identifier_sql(self, expression: exp.Identifier) -> str:
        identifier = super().identifier_sql(expression)

        # Re-attach the temp-object prefixes recorded at parse time
        if expression.args.get("global"):
            identifier = f"##{identifier}"
        elif expression.args.get("temporary"):
            identifier = f"#{identifier}"

        return identifier

    def constraint_sql(self, expression: exp.Constraint) -> str:
        this = self.sql(expression, "this")
        expressions = self.expressions(expression, flat=True, sep=" ")
        return f"CONSTRAINT {this} {expressions}"

    def length_sql(self, expression: exp.Length) -> str:
        return self._uncast_text(expression, "LEN")

    def right_sql(self, expression: exp.Right) -> str:
        return self._uncast_text(expression, "RIGHT")

    def left_sql(self, expression: exp.Left) -> str:
        return self._uncast_text(expression, "LEFT")

    def _uncast_text(self, expression: exp.Expression, name: str) -> str:
        # Drop a redundant CAST(... AS TEXT) around string-function arguments
        this = expression.this
        if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
            this_sql = self.sql(this, "this")
        else:
            this_sql = self.sql(this)
        expression_sql = self.sql(expression, "expression")
        return self.func(name, this_sql, expression_sql if expression_sql else None)

    def partition_sql(self, expression: exp.Partition) -> str:
        return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

    def alter_sql(self, expression: exp.Alter) -> str:
        # ALTER ... RENAME has no direct syntax; emit EXEC sp_rename instead
        action = seq_get(expression.args.get("actions") or [], 0)
        if isinstance(action, exp.AlterRename):
            return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
        return super().alter_sql(expression)

    def drop_sql(self, expression: exp.Drop) -> str:
        if expression.args["kind"] == "VIEW":
            # DROP VIEW names cannot be catalog-qualified
            expression.this.set("catalog", None)
        return super().drop_sql(expression)

    def options_modifier(self, expression: exp.Expression) -> str:
        options = self.expressions(expression, key="options")
        return f" OPTION{self.wrap(options)}" if options else ""

    def dpipe_sql(self, expression: exp.DPipe) -> str:
        # T-SQL concatenates strings with '+' rather than '||'
        return self.sql(
            reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
        )

    def isascii_sql(self, expression: exp.IsAscii) -> str:
        # The hex blob is a collated pattern literal used with PATINDEX to
        # detect any character outside the 7-bit range
        return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

    def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
        # Append the `= default` and OUT/OUTPUT/READONLY bits parsed by
        # _parse_column_def
        this = super().columndef_sql(expression, sep)
        default = self.sql(expression, "default")
        default = f" = {default}" if default else ""
        output = self.sql(expression, "output")
        output = f" {output}" if output else ""
        return f"{this}{default}{output}"

    def coalesce_sql(self, expression: exp.Coalesce) -> str:
        # Round-trip ISNULL as ISNULL; everything else stays COALESCE
        func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
        return rename_func(func_name)(self, expression)
Whether the base comes first in the `LOG` function. Possible values: `True`, `False`, `None` (two arguments are not supported by `LOG`).
Whether the behavior of `a / b` depends on the types of `a` and `b`. `False` means `a / b` is always float division; `True` means `a / b` is integer division if both `a` and `b` are integers.
A `NULL` argument in `CONCAT` yields `NULL` by default, but in some dialects it yields an empty string.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python `strftime` formats.
class Tokenizer(tokens.Tokenizer):
    # T-SQL brackets identifiers with [ ] and also accepts double quotes
    IDENTIFIERS = [("[", "]"), '"']
    QUOTES = ["'", '"']
    HEX_STRINGS = [("0x", ""), ("0X", "")]
    # Variables and temp objects may start with these characters
    VAR_SINGLE_TOKENS = {"@", "$", "#"}

    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
        "CLUSTERED INDEX": TokenType.INDEX,
        "DATETIME2": TokenType.DATETIME2,
        "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
        "DECLARE": TokenType.DECLARE,
        "EXEC": TokenType.COMMAND,
        "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
        "GO": TokenType.COMMAND,
        "IMAGE": TokenType.IMAGE,
        "MONEY": TokenType.MONEY,
        "NONCLUSTERED INDEX": TokenType.INDEX,
        "NTEXT": TokenType.TEXT,
        "OPTION": TokenType.OPTION,
        "OUTPUT": TokenType.RETURNING,
        "PRINT": TokenType.COMMAND,
        "PROC": TokenType.PROCEDURE,
        "REAL": TokenType.FLOAT,
        "ROWVERSION": TokenType.ROWVERSION,
        "SMALLDATETIME": TokenType.SMALLDATETIME,
        "SMALLMONEY": TokenType.SMALLMONEY,
        "SQL_VARIANT": TokenType.VARIANT,
        "SYSTEM_USER": TokenType.CURRENT_USER,
        "TOP": TokenType.TOP,
        # T-SQL's TIMESTAMP type is a row-version counter, not a datetime
        "TIMESTAMP": TokenType.ROWVERSION,
        # TINYINT is unsigned (0-255) in T-SQL
        "TINYINT": TokenType.UTINYINT,
        "UNIQUEIDENTIFIER": TokenType.UUID,
        "UPDATE STATISTICS": TokenType.COMMAND,
        "XML": TokenType.XML,
    }
    # T-SQL has no Oracle-style /*+ ... */ hint comments
    KEYWORDS.pop("/*+")

    COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- use_rs_tokenizer
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
class Parser(parser.Parser):
    SET_REQUIRES_ASSIGNMENT_DELIMITER = False
    LOG_DEFAULTS_TO_LN = True
    STRING_ALIASES = True
    NO_PAREN_IF_COMMANDS = False

    QUERY_MODIFIER_PARSERS = {
        **parser.Parser.QUERY_MODIFIER_PARSERS,
        TokenType.OPTION: lambda self: ("options", self._parse_options()),
        TokenType.FOR: lambda self: ("for", self._parse_for()),
    }

    # T-SQL does not allow BEGIN to be used as an identifier
    ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN}
    ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN}
    TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
    COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
    UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN}

    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,
        # CHARINDEX(substr, string[, start]) -> StrPosition(string, substr, start)
        "CHARINDEX": lambda args: exp.StrPosition(
            this=seq_get(args, 1),
            substr=seq_get(args, 0),
            position=seq_get(args, 2),
        ),
        "COUNT": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=False
        ),
        "COUNT_BIG": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=True
        ),
        "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
        "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
        "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
        "DATEPART": _build_formatted_time(exp.TimeToStr),
        "DATETIMEFROMPARTS": _build_datetimefromparts,
        "EOMONTH": _build_eomonth,
        "FORMAT": _build_format,
        "GETDATE": exp.CurrentTimestamp.from_arg_list,
        "HASHBYTES": _build_hashbytes,
        "ISNULL": lambda args: build_coalesce(args=args, is_null=True),
        "JSON_QUERY": _build_json_query,
        "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
        "LEN": _build_with_arg_as_text(exp.Length),
        "LEFT": _build_with_arg_as_text(exp.Left),
        "NEWID": exp.Uuid.from_arg_list,
        "RIGHT": _build_with_arg_as_text(exp.Right),
        "PARSENAME": _build_parsename,
        "REPLICATE": exp.Repeat.from_arg_list,
        "SCHEMA_NAME": exp.CurrentSchema.from_arg_list,
        "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
        "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
        "SUSER_NAME": exp.CurrentUser.from_arg_list,
        "SUSER_SNAME": exp.CurrentUser.from_arg_list,
        "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list,
        "SYSTEM_USER": exp.CurrentUser.from_arg_list,
        "TIMEFROMPARTS": _build_timefromparts,
        "DATETRUNC": _build_datetrunc,
    }

    JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

    PROCEDURE_OPTIONS = dict.fromkeys(
        ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple()
    )

    # Parameter modes accepted after a column/parameter definition
    COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"}

    RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
        TokenType.TABLE,
        *parser.Parser.TYPE_TOKENS,
    }

    STATEMENT_PARSERS = {
        **parser.Parser.STATEMENT_PARSERS,
        TokenType.DECLARE: lambda self: self._parse_declare(),
    }

    RANGE_PARSERS = {
        **parser.Parser.RANGE_PARSERS,
        TokenType.DCOLON: lambda self, this: self.expression(
            exp.ScopeResolution,
            this=this,
            expression=self._parse_function() or self._parse_var(any_token=True),
        ),
    }

    NO_PAREN_FUNCTION_PARSERS = {
        **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
        "NEXT": lambda self: self._parse_next_value_for(),
    }

    FUNCTION_PARSERS: t.Dict[str, t.Callable] = {
        **parser.Parser.FUNCTION_PARSERS,
        "JSON_ARRAYAGG": lambda self: self.expression(
            exp.JSONArrayAgg,
            this=self._parse_bitwise(),
            order=self._parse_order(),
            null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"),
        ),
    }

    # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
    COLUMN_OPERATORS = {
        **parser.Parser.COLUMN_OPERATORS,
        TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
        if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
        else self.expression(exp.ScopeResolution, this=this, expression=to),
    }

    SET_OP_MODIFIERS = {"offset"}

    # ODBC literal prefixes: {d'...'}, {t'...'}, {ts'...'}
    ODBC_DATETIME_LITERALS = {
        "d": exp.Date,
        "t": exp.Time,
        "ts": exp.Timestamp,
    }

    def _parse_alter_table_set(self) -> exp.AlterSet:
        # T-SQL wraps ALTER ... SET options in parentheses
        return self._parse_wrapped(super()._parse_alter_table_set)

    def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]:
        # A parenthesized statement may be a MERGE, not just a SELECT
        if self._match(TokenType.MERGE):
            comments = self._prev_comments
            merge = self._parse_merge()
            merge.add_comments(comments, prepend=True)
            return merge

        return super()._parse_wrapped_select(table=table)

    def _parse_dcolon(self) -> t.Optional[exp.Expression]:
        # We want to use _parse_types() if the first token after :: is a known type,
        # otherwise we could parse something like x::varchar(max) into a function
        if self._match_set(self.TYPE_TOKENS, advance=False):
            return self._parse_types()

        return self._parse_function() or self._parse_types()

    def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
        """Parse a trailing OPTION (...) query-hint clause."""
        if not self._match(TokenType.OPTION):
            return None

        def _parse_option() -> t.Optional[exp.Expression]:
            option = self._parse_var_from_options(OPTIONS)
            if not option:
                return None

            self._match(TokenType.EQ)
            return self.expression(
                exp.QueryOption, this=option, expression=self._parse_primary_or_var()
            )

        return self._parse_wrapped_csv(_parse_option)

    def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption:
        """Parse a FOR XML option of the form NAME or NAME('value')."""
        this = self._parse_primary_or_var()
        if self._match(TokenType.L_PAREN, advance=False):
            expression = self._parse_wrapped(self._parse_string)
        else:
            expression = None

        return exp.XMLKeyValueOption(this=this, expression=expression)

    def _parse_for(self) -> t.Optional[t.List[exp.Expression]]:
        """Parse a FOR XML clause following a query."""
        if not self._match_pair(TokenType.FOR, TokenType.XML):
            return None

        def _parse_for_xml() -> t.Optional[exp.Expression]:
            return self.expression(
                exp.QueryOption,
                this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False)
                or self._parse_xml_key_value_option(),
            )

        return self._parse_csv(_parse_for_xml)

    def _parse_projections(self) -> t.List[exp.Expression]:
        """
        T-SQL supports the syntax alias = expression in the SELECT's projection list,
        so we transform all parsed Selects to convert their EQ projections into Aliases.

        See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
        """
        return [
            (
                exp.alias_(projection.expression, projection.this.this, copy=False)
                if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                else projection
            )
            for projection in super()._parse_projections()
        ]

    def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
        """Applies to SQL Server and Azure SQL Database
        COMMIT [ { TRAN | TRANSACTION }
        [ transaction_name | @tran_name_variable ] ]
        [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

        ROLLBACK { TRAN | TRANSACTION }
        [ transaction_name | @tran_name_variable
        | savepoint_name | @savepoint_variable ]
        """
        rollback = self._prev.token_type == TokenType.ROLLBACK

        self._match_texts(("TRAN", "TRANSACTION"))
        this = self._parse_id_var()

        if rollback:
            return self.expression(exp.Rollback, this=this)

        durability = None
        if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
            self._match_text_seq("DELAYED_DURABILITY")
            self._match(TokenType.EQ)

            if self._match_text_seq("OFF"):
                durability = False
            else:
                self._match(TokenType.ON)
                durability = True

            self._match_r_paren()

        return self.expression(exp.Commit, this=this, durability=durability)

    def _parse_transaction(self) -> exp.Transaction | exp.Command:
        """Applies to SQL Server and Azure SQL Database
        BEGIN { TRAN | TRANSACTION }
        [ { transaction_name | @tran_name_variable }
        [ WITH MARK [ 'description' ] ]
        ]
        """
        if self._match_texts(("TRAN", "TRANSACTION")):
            transaction = self.expression(exp.Transaction, this=self._parse_id_var())
            if self._match_text_seq("WITH", "MARK"):
                transaction.set("mark", self._parse_string())

            return transaction

        # BEGIN not followed by TRAN/TRANSACTION: fall back to an opaque command
        return self._parse_as_command(self._prev)

    def _parse_returns(self) -> exp.ReturnsProperty:
        # Capture the optional @table variable of a table-valued function
        table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
        returns = super()._parse_returns()
        returns.set("table", table)
        return returns

    def _parse_convert(
        self, strict: bool, safe: t.Optional[bool] = None
    ) -> t.Optional[exp.Expression]:
        """Parse CONVERT/TRY_CONVERT(data_type, expression [, style])."""
        this = self._parse_types()
        self._match(TokenType.COMMA)
        args = [this, *self._parse_csv(self._parse_assignment)]
        convert = exp.Convert.from_arg_list(args)
        convert.set("safe", safe)
        convert.set("strict", strict)
        return convert

    def _parse_column_def(
        self, this: t.Optional[exp.Expression], computed_column: bool = True
    ) -> t.Optional[exp.Expression]:
        # Extends the base parser with `= default` and OUT/OUTPUT/READONLY modes
        this = super()._parse_column_def(this=this, computed_column=computed_column)
        if not this:
            return None
        if self._match(TokenType.EQ):
            this.set("default", self._parse_disjunction())
        if self._match_texts(self.COLUMN_DEFINITION_MODES):
            this.set("output", self._prev.text)
        return this

    def _parse_user_defined_function(
        self, kind: t.Optional[TokenType] = None
    ) -> t.Optional[exp.Expression]:
        # T-SQL procedures may declare parameters without parentheses
        this = super()._parse_user_defined_function(kind=kind)

        if (
            kind == TokenType.FUNCTION
            or isinstance(this, exp.UserDefinedFunction)
            or self._match(TokenType.ALIAS, advance=False)
        ):
            return this

        if not self._match(TokenType.WITH, advance=False):
            expressions = self._parse_csv(self._parse_function_parameter)
        else:
            expressions = None

        return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

    def _parse_into(self) -> t.Optional[exp.Into]:
        into = super()._parse_into()

        table = isinstance(into, exp.Into) and into.find(exp.Table)
        if isinstance(table, exp.Table):
            table_identifier = table.this
            if table_identifier.args.get("temporary"):
                # Promote the temporary property from the Identifier to the Into expression
                t.cast(exp.Into, into).set("temporary", True)

        return into

    def _parse_id_var(
        self,
        any_token: bool = True,
        tokens: t.Optional[t.Collection[TokenType]] = None,
    ) -> t.Optional[exp.Expression]:
        # #name / ##name prefixes mark local / global temporary objects
        is_temporary = self._match(TokenType.HASH)
        is_global = is_temporary and self._match(TokenType.HASH)

        this = super()._parse_id_var(any_token=any_token, tokens=tokens)
        if this:
            if is_global:
                this.set("global", True)
            elif is_temporary:
                this.set("temporary", True)

        return this

    def _parse_create(self) -> exp.Create | exp.Command:
        create = super()._parse_create()

        if isinstance(create, exp.Create):
            table = create.this.this if isinstance(create.this, exp.Schema) else create.this
            if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
                # A #name target table is temporary: surface it as a property
                if not create.args.get("properties"):
                    create.set("properties", exp.Properties(expressions=[]))

                create.args["properties"].append("expressions", exp.TemporaryProperty())

        return create

    def _parse_if(self) -> t.Optional[exp.Expression]:
        index = self._index

        if self._match_text_seq("OBJECT_ID"):
            self._parse_wrapped_csv(self._parse_string)
            if self._match_text_seq("IS",
            # NOTE(review): the extracted source chunk is truncated here,
            # mid-statement; the remainder of _parse_if is outside this view.
"NOT", "NULL") and self._match(TokenType.DROP): 899 return self._parse_drop(exists=True) 900 self._retreat(index) 901 902 return super()._parse_if() 903 904 def _parse_unique(self) -> exp.UniqueColumnConstraint: 905 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 906 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 907 else: 908 this = self._parse_schema(self._parse_id_var(any_token=False)) 909 910 return self.expression(exp.UniqueColumnConstraint, this=this) 911 912 def _parse_update(self) -> exp.Update: 913 expression = super()._parse_update() 914 expression.set("options", self._parse_options()) 915 return expression 916 917 def _parse_partition(self) -> t.Optional[exp.Partition]: 918 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 919 return None 920 921 def parse_range(): 922 low = self._parse_bitwise() 923 high = self._parse_bitwise() if self._match_text_seq("TO") else None 924 925 return ( 926 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 927 ) 928 929 partition = self.expression( 930 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 931 ) 932 933 self._match_r_paren() 934 935 return partition 936 937 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 938 var = self._parse_id_var() 939 if not var: 940 return None 941 942 self._match(TokenType.ALIAS) 943 return self.expression( 944 exp.DeclareItem, 945 this=var, 946 kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(), 947 default=self._match(TokenType.EQ) and self._parse_bitwise(), 948 ) 949 950 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 951 expression = super()._parse_alter_table_alter() 952 953 if expression is not None: 954 collation = expression.args.get("collate") 955 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 956 identifier = collation.this 957 collation.set("this", exp.Var(this=identifier.name)) 958 959 return expression
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- COLON_PLACEHOLDER_TOKENS
- ARRAY_CONSTRUCTORS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- CAST_COLUMN_OPERATORS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PIPE_SYNTAX_TRANSFORM_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- QUERY_MODIFIER_TOKENS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- WINDOW_EXCLUDE_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- ANALYZE_STYLES
- ANALYZE_EXPRESSION_PARSERS
- PARTITION_KEYWORDS
- AMBIGUOUS_ALIAS_TOKENS
- OPERATION_MODIFIERS
- RECURSIVE_CTE_SEARCH_KIND
- MODIFIABLES
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- OPTIONAL_ALIAS_TOKEN_CTE
- ALTER_RENAME_REQUIRES_COLUMN
- ALTER_TABLE_PARTITIONS
- JOINS_HAVE_EQUAL_PRECEDENCE
- ZONE_AWARE_TIMESTAMP_CONSTRUCTOR
- MAP_KEYS_ARE_ARBITRARY_EXPRESSIONS
- JSON_EXTRACT_REQUIRES_JSON_EXPRESSION
- ADD_JOIN_ON_TRUE
- SUPPORTS_OMITTED_INTERVAL_SPAN_UNIT
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- parse_set_operation
- build_cast
- errors
- sql
class Generator(generator.Generator):
    """SQL generator for the T-SQL (SQL Server / Azure SQL) dialect."""

    # Dialect feature flags consumed by the base Generator.
    LIMIT_IS_TOP = True
    QUERY_HINTS = False
    RETURNING_END = False
    NVL2_SUPPORTED = False
    ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
    LIMIT_FETCH = "FETCH"
    COMPUTED_COLUMN_WITH_TYPE = False
    CTE_RECURSIVE_KEYWORD_REQUIRED = False
    ENSURE_BOOLS = True
    NULL_ORDERING_SUPPORTED = None
    SUPPORTS_SINGLE_ARG_CONCAT = False
    TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
    SUPPORTS_SELECT_INTO = True
    JSON_PATH_BRACKETED_KEY_SUPPORTED = False
    SUPPORTS_TO_NUMBER = False
    SET_OP_MODIFIERS = False
    COPY_PARAMS_EQ_REQUIRED = True
    PARSE_JSON_NAME = None
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
    ALTER_SET_WRAPPED = True
    ALTER_SET_TYPE = ""

    # Statements whose nested CTEs must be bubbled up to the top level.
    EXPRESSIONS_WITHOUT_NESTED_CTES = {
        exp.Create,
        exp.Delete,
        exp.Insert,
        exp.Intersect,
        exp.Except,
        exp.Merge,
        exp.Select,
        exp.Subquery,
        exp.Union,
        exp.Update,
    }

    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BOOLEAN: "BIT",
        exp.DataType.Type.DATETIME2: "DATETIME2",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DOUBLE: "FLOAT",
        exp.DataType.Type.INT: "INTEGER",
        exp.DataType.Type.ROWVERSION: "ROWVERSION",
        exp.DataType.Type.TEXT: "VARCHAR(MAX)",
        exp.DataType.Type.TIMESTAMP: "DATETIME2",
        exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
        exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
        exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
        exp.DataType.Type.UTINYINT: "TINYINT",
        exp.DataType.Type.VARIANT: "SQL_VARIANT",
        exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
    }

    # NCHAR/NVARCHAR are native T-SQL types; generate them as-is.
    TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
    TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.AnyValue: any_value_to_max_sql,
        exp.ArrayToString: rename_func("STRING_AGG"),
        exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
        exp.Chr: rename_func("CHAR"),
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.DateDiff: date_delta_sql("DATEDIFF"),
        exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
        exp.CurrentDate: rename_func("GETDATE"),
        exp.CurrentTimestamp: rename_func("GETDATE"),
        exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.Extract: rename_func("DATEPART"),
        exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
        exp.GroupConcat: _string_agg_sql,
        exp.If: rename_func("IIF"),
        exp.JSONExtract: _json_extract_sql,
        exp.JSONExtractScalar: _json_extract_sql,
        exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
        exp.Ln: rename_func("LOG"),
        exp.Max: max_or_greatest,
        exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
        exp.Min: min_or_least,
        exp.NumberToStr: _format_sql,
        exp.Repeat: rename_func("REPLICATE"),
        exp.CurrentSchema: rename_func("SCHEMA_NAME"),
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_distinct_on,
                transforms.eliminate_semi_and_anti_joins,
                transforms.eliminate_qualify,
                transforms.unnest_generate_date_array_using_recursive_cte,
            ]
        ),
        exp.Stddev: rename_func("STDEV"),
        exp.StrPosition: lambda self, e: strposition_sql(
            self, e, func_name="CHARINDEX", supports_position=True
        ),
        exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
        exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
        exp.SHA2: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
        ),
        exp.TemporaryProperty: lambda self, e: "",
        exp.TimeStrToTime: _timestrtotime_sql,
        exp.TimeToStr: _format_sql,
        exp.Trim: trim_sql,
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
        exp.Uuid: lambda *_: "NEWID()",
        exp.DateFromParts: rename_func("DATEFROMPARTS"),
    }

    # RETURNS needs bespoke handling (see returnsproperty_sql below).
    TRANSFORMS.pop(exp.ReturnsProperty)

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    def scope_resolution(self, rhs: str, scope_name: str) -> str:
        """Render scope resolution using T-SQL's `scope::member` syntax."""
        return f"{scope_name}::{rhs}"

    def select_sql(self, expression: exp.Select) -> str:
        """Generate SELECT, rewriting LIMIT/FETCH into the ORDER BY +
        OFFSET ... FETCH form that T-SQL requires."""
        limit = expression.args.get("limit")
        offset = expression.args.get("offset")

        if isinstance(limit, exp.Fetch) and not offset:
            # Dialects like Oracle can FETCH directly from a row set but
            # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
            offset = exp.Offset(expression=exp.Literal.number(0))
            expression.set("offset", offset)

        if offset:
            if not expression.args.get("order"):
                # ORDER BY is required in order to use OFFSET in a query, so we use
                # a noop order by, since we don't really care about the order.
                # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, so we need to use FETCH instead
                # of TOP; we replace here because otherwise TOP would be generated
                # in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

        return super().select_sql(expression)

    def convert_sql(self, expression: exp.Convert) -> str:
        """Generate CONVERT, or TRY_CONVERT for the NULL-on-failure variant."""
        name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
        return self.func(
            name, expression.this, expression.expression, expression.args.get("style")
        )

    def queryoption_sql(self, expression: exp.QueryOption) -> str:
        """Generate one entry of an OPTION (...) clause; some options require
        an '=' between name and value."""
        option = self.sql(expression, "this")
        value = self.sql(expression, "expression")
        if value:
            optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
            return f"{option} {optional_equal_sign}{value}"
        return option

    def lateral_op(self, expression: exp.Lateral) -> str:
        """Map LATERAL joins onto CROSS APPLY / OUTER APPLY."""
        cross_apply = expression.args.get("cross_apply")
        if cross_apply is True:
            return "CROSS APPLY"
        if cross_apply is False:
            return "OUTER APPLY"

        # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
        self.unsupported("LATERAL clause is not supported.")
        return "LATERAL"

    def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
        """Transpile SPLIT_PART to PARSENAME, which only supports literal
        inputs with a '.' delimiter and at most four parts; note PARSENAME
        numbers parts from the right, hence the index inversion."""
        this = expression.this
        split_count = len(this.name.split("."))
        delimiter = expression.args.get("delimiter")
        part_index = expression.args.get("part_index")

        if (
            not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
            or (delimiter and delimiter.name != ".")
            or not part_index
            or split_count > 4
        ):
            self.unsupported(
                "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
            )
            return ""

        return self.func(
            "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
        )

    def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
        """Generate TIMEFROMPARTS; drops unsupported nanoseconds and fills
        the mandatory fractions/precision arguments with zeros."""
        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

        if expression.args.get("fractions") is None:
            expression.set("fractions", exp.Literal.number(0))
        if expression.args.get("precision") is None:
            expression.set("precision", exp.Literal.number(0))

        return rename_func("TIMEFROMPARTS")(self, expression)

    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
        """Generate DATETIMEFROMPARTS; drops unsupported zone/nanoseconds and
        fills the mandatory milliseconds argument with zero."""
        zone = expression.args.get("zone")
        if zone is not None:
            zone.pop()
            self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

        if expression.args.get("milli") is None:
            expression.set("milli", exp.Literal.number(0))

        return rename_func("DATETIMEFROMPARTS")(self, expression)

    def setitem_sql(self, expression: exp.SetItem) -> str:
        this = expression.this
        if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
            # T-SQL does not use '=' in SET command, except when the LHS is a variable.
            return f"{self.sql(this.left)} {self.sql(this.right)}"

        return super().setitem_sql(expression)

    def boolean_sql(self, expression: exp.Boolean) -> str:
        """T-SQL has no boolean literals: render 1/0 in BIT-valued contexts
        and the (1 = 1)/(1 = 0) predicates elsewhere."""
        if type(expression.parent) in BIT_TYPES or isinstance(
            expression.find_ancestor(exp.Values, exp.Select), exp.Values
        ):
            return "1" if expression.this else "0"

        return "(1 = 1)" if expression.this else "(1 = 0)"

    def is_sql(self, expression: exp.Is) -> str:
        # IS TRUE/FALSE does not exist in T-SQL; compare against the BIT value.
        if isinstance(expression.expression, exp.Boolean):
            return self.binary(expression, "=")
        return self.binary(expression, "IS")

    def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
        """Render the CREATE target, prefixing '#' when it carries a
        TemporaryProperty and the name isn't already marked temporary."""
        sql = self.sql(expression, "this")
        properties = expression.args.get("properties")

        if sql[:1] != "#" and any(
            isinstance(prop, exp.TemporaryProperty)
            for prop in (properties.expressions if properties else [])
        ):
            # Keep the '#' inside the bracket quoting if the name is quoted.
            sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

        return sql

    def create_sql(self, expression: exp.Create) -> str:
        """Generate CREATE with T-SQL rewrites: CTAS becomes SELECT ... INTO,
        IF NOT EXISTS becomes an IF NOT EXISTS (...) EXEC('...') guard, and
        CREATE OR REPLACE becomes CREATE OR ALTER."""
        kind = expression.kind
        exists = expression.args.pop("exists", None)

        like_property = expression.find(exp.LikeProperty)
        if like_property:
            ctas_expression = like_property.this
        else:
            ctas_expression = expression.expression

        if kind == "VIEW":
            expression.this.set("catalog", None)
            with_ = expression.args.get("with")
            if ctas_expression and with_:
                # We've already preprocessed the Create expression to bubble up any nested CTEs,
                # but CREATE VIEW actually requires the WITH clause to come after it so we need
                # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                ctas_expression.set("with", with_.pop())

        table = expression.find(exp.Table)

        # Convert CTAS statement to SELECT .. INTO ..
        if kind == "TABLE" and ctas_expression:
            if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                ctas_expression = ctas_expression.subquery()

            properties = expression.args.get("properties") or exp.Properties()
            is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

            select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
            select_into.set("into", exp.Into(this=table, temporary=is_temp))

            if like_property:
                # CREATE ... LIKE copies only the schema: select zero rows.
                select_into.limit(0, copy=False)

            sql = self.sql(select_into)
        else:
            sql = super().create_sql(expression)

        if exists:
            # Emulate IF NOT EXISTS by probing the catalog views and EXECing
            # the statement only when the object is absent.
            identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
            sql_with_ctes = self.prepend_ctes(expression, sql)
            sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
            if kind == "SCHEMA":
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
            elif kind == "TABLE":
                assert table
                where = exp.and_(
                    exp.column("TABLE_NAME").eq(table.name),
                    exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                    exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
                )
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
            elif kind == "INDEX":
                index = self.sql(exp.Literal.string(expression.this.text("this")))
                return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
        elif expression.args.get("replace"):
            sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

        return self.prepend_ctes(expression, sql)

    @generator.unsupported_args("unlogged", "expressions")
    def into_sql(self, expression: exp.Into) -> str:
        if expression.args.get("temporary"):
            # If the Into expression has a temporary property, push this down to the Identifier
            table = expression.find(exp.Table)
            if table and isinstance(table.this, exp.Identifier):
                table.this.set("temporary", True)

        return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

    def count_sql(self, expression: exp.Count) -> str:
        # COUNT_BIG returns BIGINT, mirroring a 64-bit COUNT in other dialects.
        func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
        return rename_func(func_name)(self, expression)

    def offset_sql(self, expression: exp.Offset) -> str:
        # T-SQL requires the ROWS keyword after OFFSET <n>.
        return f"{super().offset_sql(expression)} ROWS"

    def version_sql(self, expression: exp.Version) -> str:
        """Generate a temporal query clause, e.g. FOR SYSTEM_TIME AS OF ...,
        FROM ... TO ..., or BETWEEN ... AND ...."""
        name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
        this = f"FOR {name}"
        expr = expression.expression
        kind = expression.text("kind")
        if kind in ("FROM", "BETWEEN"):
            args = expr.expressions
            sep = "TO" if kind == "FROM" else "AND"
            expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
        else:
            expr_sql = self.sql(expr)

        expr_sql = f" {expr_sql}" if expr_sql else ""
        return f"{this} {kind}{expr_sql}"

    def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
        # RETURNS [@table_variable] <type>, for table-valued functions.
        table = expression.args.get("table")
        table = f"{table} " if table else ""
        return f"RETURNS {table}{self.sql(expression, 'this')}"

    def returning_sql(self, expression: exp.Returning) -> str:
        # RETURNING maps onto T-SQL's OUTPUT [... INTO <target>] clause.
        into = self.sql(expression, "into")
        into = self.seg(f"INTO {into}") if into else ""
        return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

    def transaction_sql(self, expression: exp.Transaction) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        mark = self.sql(expression, "mark")
        mark = f" WITH MARK {mark}" if mark else ""
        return f"BEGIN TRANSACTION{this}{mark}"

    def commit_sql(self, expression: exp.Commit) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        durability = expression.args.get("durability")
        # durability is a tri-state: None (unspecified), True (ON), False (OFF).
        durability = (
            f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
            if durability is not None
            else ""
        )
        return f"COMMIT TRANSACTION{this}{durability}"

    def rollback_sql(self, expression: exp.Rollback) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"ROLLBACK TRANSACTION{this}"

    def identifier_sql(self, expression: exp.Identifier) -> str:
        """Render an identifier, restoring the ##global / #temporary prefix
        recorded by the parser."""
        identifier = super().identifier_sql(expression)

        if expression.args.get("global"):
            identifier = f"##{identifier}"
        elif expression.args.get("temporary"):
            identifier = f"#{identifier}"

        return identifier

    def constraint_sql(self, expression: exp.Constraint) -> str:
        this = self.sql(expression, "this")
        expressions = self.expressions(expression, flat=True, sep=" ")
        return f"CONSTRAINT {this} {expressions}"

    def length_sql(self, expression: exp.Length) -> str:
        return self._uncast_text(expression, "LEN")

    def right_sql(self, expression: exp.Right) -> str:
        return self._uncast_text(expression, "RIGHT")

    def left_sql(self, expression: exp.Left) -> str:
        return self._uncast_text(expression, "LEFT")

    def _uncast_text(self, expression: exp.Expression, name: str) -> str:
        """Generate `name(...)`, dropping a redundant CAST(... AS TEXT) around
        the first argument since these functions accept strings directly."""
        this = expression.this
        if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
            this_sql = self.sql(this, "this")
        else:
            this_sql = self.sql(this)
        expression_sql = self.sql(expression, "expression")
        return self.func(name, this_sql, expression_sql if expression_sql else None)

    def partition_sql(self, expression: exp.Partition) -> str:
        return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

    def alter_sql(self, expression: exp.Alter) -> str:
        # ALTER ... RENAME has no direct syntax in T-SQL; use sp_rename.
        action = seq_get(expression.args.get("actions") or [], 0)
        if isinstance(action, exp.AlterRename):
            return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
        return super().alter_sql(expression)

    def drop_sql(self, expression: exp.Drop) -> str:
        # DROP VIEW cannot reference a catalog-qualified name.
        if expression.args["kind"] == "VIEW":
            expression.this.set("catalog", None)
        return super().drop_sql(expression)

    def options_modifier(self, expression: exp.Expression) -> str:
        options = self.expressions(expression, key="options")
        return f" OPTION{self.wrap(options)}" if options else ""

    def dpipe_sql(self, expression: exp.DPipe) -> str:
        # T-SQL uses '+' for string concatenation; fold a || chain into Adds.
        return self.sql(
            reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
        )

    def isascii_sql(self, expression: exp.IsAscii) -> str:
        # PATINDEX against the binary-collated pattern '%[^ -\x7f]%' (hex-encoded
        # to survive transport) finds the first non-ASCII char; 0 means all-ASCII.
        return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

    def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
        """Generate a column definition plus T-SQL extras: the inline
        `= <default>` value and the OUT/OUTPUT/READONLY mode keyword."""
        this = super().columndef_sql(expression, sep)
        default = self.sql(expression, "default")
        default = f" = {default}" if default else ""
        output = self.sql(expression, "output")
        output = f" {output}" if output else ""
        return f"{this}{default}{output}"

    def coalesce_sql(self, expression: exp.Coalesce) -> str:
        # Preserve ISNULL when the expression originated from it.
        func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
        return rename_func(func_name)(self, expression)
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
- normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
1089 def select_sql(self, expression: exp.Select) -> str: 1090 limit = expression.args.get("limit") 1091 offset = expression.args.get("offset") 1092 1093 if isinstance(limit, exp.Fetch) and not offset: 1094 # Dialects like Oracle can FETCH directly from a row set but 1095 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1096 offset = exp.Offset(expression=exp.Literal.number(0)) 1097 expression.set("offset", offset) 1098 1099 if offset: 1100 if not expression.args.get("order"): 1101 # ORDER BY is required in order to use OFFSET in a query, so we use 1102 # a noop order by, since we don't really care about the order. 1103 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1104 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1105 1106 if isinstance(limit, exp.Limit): 1107 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1108 # we replace here because otherwise TOP would be generated in select_sql 1109 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1110 1111 return super().select_sql(expression)
1119 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1120 option = self.sql(expression, "this") 1121 value = self.sql(expression, "expression") 1122 if value: 1123 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1124 return f"{option} {optional_equal_sign}{value}" 1125 return option
1127 def lateral_op(self, expression: exp.Lateral) -> str: 1128 cross_apply = expression.args.get("cross_apply") 1129 if cross_apply is True: 1130 return "CROSS APPLY" 1131 if cross_apply is False: 1132 return "OUTER APPLY" 1133 1134 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1135 self.unsupported("LATERAL clause is not supported.") 1136 return "LATERAL"
1138 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1139 this = expression.this 1140 split_count = len(this.name.split(".")) 1141 delimiter = expression.args.get("delimiter") 1142 part_index = expression.args.get("part_index") 1143 1144 if ( 1145 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1146 or (delimiter and delimiter.name != ".") 1147 or not part_index 1148 or split_count > 4 1149 ): 1150 self.unsupported( 1151 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1152 ) 1153 return "" 1154 1155 return self.func( 1156 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1157 )
1159 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1160 nano = expression.args.get("nano") 1161 if nano is not None: 1162 nano.pop() 1163 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1164 1165 if expression.args.get("fractions") is None: 1166 expression.set("fractions", exp.Literal.number(0)) 1167 if expression.args.get("precision") is None: 1168 expression.set("precision", exp.Literal.number(0)) 1169 1170 return rename_func("TIMEFROMPARTS")(self, expression)
1172 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1173 zone = expression.args.get("zone") 1174 if zone is not None: 1175 zone.pop() 1176 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1177 1178 nano = expression.args.get("nano") 1179 if nano is not None: 1180 nano.pop() 1181 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1182 1183 if expression.args.get("milli") is None: 1184 expression.set("milli", exp.Literal.number(0)) 1185 1186 return rename_func("DATETIMEFROMPARTS")(self, expression)
1188 def setitem_sql(self, expression: exp.SetItem) -> str: 1189 this = expression.this 1190 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1191 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1192 return f"{self.sql(this.left)} {self.sql(this.right)}" 1193 1194 return super().setitem_sql(expression)
1209 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1210 sql = self.sql(expression, "this") 1211 properties = expression.args.get("properties") 1212 1213 if sql[:1] != "#" and any( 1214 isinstance(prop, exp.TemporaryProperty) 1215 for prop in (properties.expressions if properties else []) 1216 ): 1217 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1218 1219 return sql
def create_sql(self, expression: exp.Create) -> str:
    """Generate T-SQL for CREATE statements.

    Handles several T-SQL-specific rewrites:
    - CREATE VIEW: drops the catalog qualifier and moves a leading WITH
      (CTE) clause into the view's query, where T-SQL requires it.
    - CTAS (CREATE TABLE ... AS SELECT): rewritten to SELECT ... INTO ...,
      since T-SQL has no CTAS syntax.
    - IF NOT EXISTS: emulated with an IF NOT EXISTS (...) EXEC('...') guard
      against the relevant catalog view (schemas, tables, or indexes).
    - CREATE OR REPLACE: rewritten to CREATE OR ALTER.
    """
    kind = expression.kind
    # Pop "exists" so the base generator doesn't try to render IF NOT EXISTS.
    exists = expression.args.pop("exists", None)

    # CREATE TABLE ... LIKE other is emulated via a zero-row SELECT INTO below.
    like_property = expression.find(exp.LikeProperty)
    if like_property:
        ctas_expression = like_property.this
    else:
        ctas_expression = expression.expression

    if kind == "VIEW":
        expression.this.set("catalog", None)
        with_ = expression.args.get("with")
        if ctas_expression and with_:
            # We've already preprocessed the Create expression to bubble up any nested CTEs,
            # but CREATE VIEW actually requires the WITH clause to come after it so we need
            # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
            ctas_expression.set("with", with_.pop())

    table = expression.find(exp.Table)

    # Convert CTAS statement to SELECT .. INTO ..
    if kind == "TABLE" and ctas_expression:
        if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
            ctas_expression = ctas_expression.subquery()

        properties = expression.args.get("properties") or exp.Properties()
        is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

        # SELECT * FROM (<query>) AS temp INTO <table>
        select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
        select_into.set("into", exp.Into(this=table, temporary=is_temp))

        if like_property:
            # LIKE only copies the structure, so select zero rows.
            select_into.limit(0, copy=False)

        sql = self.sql(select_into)
    else:
        sql = super().create_sql(expression)

    if exists:
        # Emulate IF NOT EXISTS by probing the system catalog and running the
        # statement through EXEC('...') only when the object is missing.
        identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
        sql_with_ctes = self.prepend_ctes(expression, sql)
        sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
        if kind == "SCHEMA":
            return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
        elif kind == "TABLE":
            assert table
            where = exp.and_(
                exp.column("TABLE_NAME").eq(table.name),
                exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
            )
            return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
        elif kind == "INDEX":
            index = self.sql(exp.Literal.string(expression.this.text("this")))
            return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
    elif expression.args.get("replace"):
        # T-SQL spells OR REPLACE as OR ALTER.
        sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

    return self.prepend_ctes(expression, sql)
@generator.unsupported_args("unlogged", "expressions")
def into_sql(self, expression: exp.Into) -> str:
    """Render an INTO clause, pushing temporariness down to the identifier.

    The identifier renderer turns the "temporary" flag into a '#' prefix,
    which is how T-SQL spells temp-table targets for SELECT ... INTO.
    """
    if expression.args.get("temporary"):
        target = expression.find(exp.Table)
        ident = target.this if target else None
        if isinstance(ident, exp.Identifier):
            ident.set("temporary", True)

    return f"{self.seg('INTO')} {self.sql(expression, 'this')}"
def version_sql(self, expression: exp.Version) -> str:
    """Render a temporal-table clause, e.g. ``FOR SYSTEM_TIME AS OF ...``."""
    raw_name = expression.name
    # T-SQL spells the versioning dimension SYSTEM_TIME rather than TIMESTAMP.
    clause = f"FOR {'SYSTEM_TIME' if raw_name == 'TIMESTAMP' else raw_name}"
    kind = expression.text("kind")
    expr = expression.expression

    if kind in ("FROM", "BETWEEN"):
        # Two-endpoint forms: FROM x TO y / BETWEEN x AND y.
        endpoints = expr.expressions
        joiner = "TO" if kind == "FROM" else "AND"
        rendered = f"{self.sql(seq_get(endpoints, 0))} {joiner} {self.sql(seq_get(endpoints, 1))}"
    else:
        rendered = self.sql(expr)

    suffix = f" {rendered}" if rendered else ""
    return f"{clause} {kind}{suffix}"
def commit_sql(self, expression: exp.Commit) -> str:
    """Render COMMIT TRANSACTION with an optional DELAYED_DURABILITY hint."""
    name = self.sql(expression, "this")
    suffix = f" {name}" if name else ""

    # Tri-state: None means the hint was absent; True/False map to ON/OFF.
    durability = expression.args.get("durability")
    if durability is None:
        hint = ""
    else:
        hint = f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"

    return f"COMMIT TRANSACTION{suffix}{hint}"
def identifier_sql(self, expression: exp.Identifier) -> str:
    """Render an identifier, applying T-SQL temp prefixes (# local, ## global)."""
    rendered = super().identifier_sql(expression)

    # "global" wins over "temporary" when both flags are set.
    if expression.args.get("global"):
        return f"##{rendered}"
    if expression.args.get("temporary"):
        return f"#{rendered}"

    return rendered
def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
    """Render a column definition, appending ``= <default>`` and OUTPUT parts."""
    parts = [super().columndef_sql(expression, sep)]

    default_sql = self.sql(expression, "default")
    if default_sql:
        # T-SQL procedure/function parameters take defaults as "name type = expr".
        parts.append(f" = {default_sql}")

    output_sql = self.sql(expression, "output")
    if output_sql:
        parts.append(f" {output_sql}")

    return "".join(parts)
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- SUPPORTS_WINDOW_EXCLUDE
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- UNICODE_SUBSTITUTE
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- SUPPORTS_MEDIAN
- SUPPORTS_UNIX_SECONDS
- NORMALIZE_EXTRACT_DATE_PARTS
- ARRAY_SIZE_NAME
- ARRAY_SIZE_DIM_REQUIRED
- SUPPORTS_BETWEEN_FLAGS
- SUPPORTS_LIKE_QUANTIFIERS
- MATCH_AGAINST_TABLE_PREFIX
- UNSUPPORTED_TYPES
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS
- SAFE_JSON_PATH_KEY_RE
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- sanitize_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- columnposition_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- limitoptions_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablefromrows_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- from_sql
- groupingsets_sql
- rollup_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- queryband_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- for_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- formatphrase_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterindex_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- altersession_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- addpartition_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- safedivide_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- like_sql
- ilike_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- jsoncast_sql
- try_sql
- log_sql
- use_sql
- binary
- ceil_floor
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- whens_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- uniquekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- generateembedding_sql
- mltranslate_sql
- mlforecast_sql
- featuresattime_sql
- vectorsearch_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodatetime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- arrayconcat_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonextractquote_sql
- jsonexists_sql
- arrayagg_sql
- apply_sql
- grant_sql
- revoke_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql
- string_sql
- median_sql
- overflowtruncatebehavior_sql
- unixseconds_sql
- arraysize_sql
- attach_sql
- detach_sql
- attachoption_sql
- watermarkcolumnconstraint_sql
- encodeproperty_sql
- includeproperty_sql
- xmlelement_sql
- xmlkeyvalueoption_sql
- partitionbyrangeproperty_sql
- partitionbyrangepropertydynamic_sql
- unpivotcolumns_sql
- analyzesample_sql
- analyzestatistics_sql
- analyzehistogram_sql
- analyzedelete_sql
- analyzelistchainedrows_sql
- analyzevalidate_sql
- analyze_sql
- xmltable_sql
- xmlnamespace_sql
- export_sql
- declare_sql
- declareitem_sql
- recursivewithsearch_sql
- parameterizedagg_sql
- anonymousaggfunc_sql
- combinedaggfunc_sql
- combinedparameterizedagg_sql
- show_sql
- install_sql
- get_put_sql
- translatecharacters_sql
- decodecase_sql
- semanticview_sql
- getextract_sql
- datefromunixdate_sql
- space_sql
- buildproperty_sql
- refreshtriggerproperty_sql
- modelattribute_sql
- directorystage_sql