sqlglot.dialects.tsql
from __future__ import annotations

import datetime
import re
import typing as t
from functools import partial, reduce

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    NormalizationStrategy,
    any_value_to_max_sql,
    build_date_delta,
    date_delta_sql,
    datestrtodate_sql,
    generatedasidentitycolumnconstraint_sql,
    max_or_greatest,
    min_or_least,
    rename_func,
    strposition_sql,
    timestrtotime_sql,
    trim_sql,
)
from sqlglot.helper import seq_get
from sqlglot.parser import build_coalesce
from sqlglot.time import format_time
from sqlglot.tokens import TokenType

if t.TYPE_CHECKING:
    from sqlglot._typing import E

# T-SQL date-part names that map to a "full" strftime format, i.e. the full
# weekday name (%A) or full month name (%B), as produced by DATENAME.
FULL_FORMAT_TIME_MAPPING = {
    "weekday": "%A",
    "dw": "%A",
    "w": "%A",
    "month": "%B",
    "mm": "%B",
    "m": "%B",
}

# Canonicalizes T-SQL date-part abbreviations (e.g. "yy", "qq", "wk") to the
# full unit name used when transpiling DATEADD/DATEDIFF-style functions.
DATE_DELTA_INTERVAL = {
    "year": "year",
    "yyyy": "year",
    "yy": "year",
    "quarter": "quarter",
    "qq": "quarter",
    "q": "quarter",
    "month": "month",
    "mm": "month",
    "m": "month",
    "week": "week",
    "ww": "week",
    "wk": "week",
    "day": "day",
    "dd": "day",
    "d": "day",
}


# Matches any day/month/year/hour/second token in a FORMAT() format string;
# used to decide whether a FORMAT() call is date-like or purely numeric.
DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})")

# FORMAT() number formats that are safe to transpile: N = Numeric, C = Currency
TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"}

# T-SQL's epoch for integer DATETIME values: the integer N means N days after 1900-01-01
DEFAULT_START_DATE = datetime.date(1900, 1, 1)

# Expression contexts in which a BIT value is treated as boolean-like
BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias}

# Recognized OPTION(...) query hints; each key maps to the keyword sequences
# that may follow it (an empty tuple means the hint stands alone).
# Unsupported options:
# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] )
# - TABLE HINT
OPTIONS: parser.OPTIONS_TYPE = {
    **dict.fromkeys(
        (
            "DISABLE_OPTIMIZED_PLAN_FORCING",
            "FAST",
            "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX",
            "LABEL",
            "MAXDOP",
            "MAXRECURSION",
            "MAX_GRANT_PERCENT",
            "MIN_GRANT_PERCENT",
            "NO_PERFORMANCE_SPOOL",
            "QUERYTRACEON",
            "RECOMPILE",
        ),
        tuple(),
    ),
    "CONCAT": ("UNION",),
    "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"),
    "EXPAND": ("VIEWS",),
    "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"),
    "HASH": ("GROUP", "JOIN", "UNION"),
    "KEEP": ("PLAN",),
    "KEEPFIXED": ("PLAN",),
    "LOOP": ("JOIN",),
    "MERGE": ("JOIN", "UNION"),
    "OPTIMIZE": (("FOR", "UNKNOWN"),),
    "ORDER": ("GROUP",),
    "PARAMETERIZATION": ("FORCED", "SIMPLE"),
    "ROBUST": ("PLAN",),
    "USE": ("PLAN",),
}


# Options accepted by SELECT ... FOR XML
XML_OPTIONS: parser.OPTIONS_TYPE = {
    **dict.fromkeys(
        (
            "AUTO",
            "EXPLICIT",
            "TYPE",
        ),
        tuple(),
    ),
    "ELEMENTS": (
        "XSINIL",
        "ABSENT",
    ),
    "BINARY": ("BASE64",),
}


# Query options whose value must be introduced with an equals sign
OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL")


def _build_formatted_time(
    exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None
) -> t.Callable[[t.List], E]:
    """Build a parser for DATENAME/DATEPART-style calls: (format, date).

    Args:
        exp_class: The expression class to instantiate, e.g. ``exp.TimeToStr``.
        full_format_mapping: When truthy, overlay FULL_FORMAT_TIME_MAPPING on top
            of TSQL.TIME_MAPPING so parts such as "month" yield full names (%B).

    Returns:
        A builder that converts the T-SQL part name (arg 0) to a strftime
        literal and casts the date argument (arg 1) to DATETIME2.
    """

    def _builder(args: t.List) -> E:
        fmt = seq_get(args, 0)
        if isinstance(fmt, exp.Expression):
            fmt = exp.Literal.string(
                format_time(
                    fmt.name.lower(),
                    (
                        {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING}
                        if full_format_mapping
                        else TSQL.TIME_MAPPING
                    ),
                )
            )

        this = seq_get(args, 1)
        if isinstance(this, exp.Expression):
            # Cast so downstream dialects know the operand is a datetime
            this = exp.cast(this, exp.DataType.Type.DATETIME2)

        return exp_class(this=this, format=fmt)

    return _builder


def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr:
    """Parse FORMAT(value, format[, culture]) into NumberToStr or TimeToStr.

    The call is treated as numeric when the format is a transpile-safe number
    format or contains no date-like tokens (see DATE_FMT_RE); otherwise the
    format is converted to its strftime equivalent.
    """
    this = seq_get(args, 0)
    fmt = seq_get(args, 1)
    culture = seq_get(args, 2)

    number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name))

    if number_fmt:
        return exp.NumberToStr(this=this, format=fmt, culture=culture)

    if fmt:
        # Single-character formats are looked up in FORMAT_TIME_MAPPING,
        # longer (custom) format strings in TIME_MAPPING
        fmt = exp.Literal.string(
            format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING)
            if len(fmt.name) == 1
            else format_time(fmt.name, TSQL.TIME_MAPPING)
        )

    return exp.TimeToStr(this=this, format=fmt, culture=culture)
def _build_eomonth(args: t.List) -> exp.LastDay:
    """Parse EOMONTH(date[, month_lag]) into LAST_DAY, adding months when given."""
    date = exp.TsOrDsToDate(this=seq_get(args, 0))
    month_lag = seq_get(args, 1)

    if month_lag is None:
        this: exp.Expression = date
    else:
        unit = DATE_DELTA_INTERVAL.get("month")
        this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit))

    return exp.LastDay(this=this)


def _build_hashbytes(args: t.List) -> exp.Expression:
    """Parse HASHBYTES('<algorithm>', data) into the matching hash expression.

    Algorithms without a dedicated sqlglot expression fall back to an
    anonymous HASHBYTES call with the original arguments.
    """
    kind, data = args
    kind = kind.name.upper() if kind.is_string else ""

    if kind == "MD5":
        # Drop the algorithm literal: the target expression implies it
        args.pop(0)
        return exp.MD5(this=data)
    if kind in ("SHA", "SHA1"):
        args.pop(0)
        return exp.SHA(this=data)
    if kind == "SHA2_256":
        return exp.SHA2(this=data, length=exp.Literal.number(256))
    if kind == "SHA2_512":
        return exp.SHA2(this=data, length=exp.Literal.number(512))

    return exp.func("HASHBYTES", *args)


# Date parts that are rendered through DATEPART rather than FORMAT
DATEPART_ONLY_FORMATS = {"DW", "WK", "HOUR", "QUARTER"}


def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str:
    """Generate FORMAT(...) SQL, or DATEPART(...) for DATEPART-only parts."""
    fmt = expression.args["format"]

    if not isinstance(expression, exp.NumberToStr):
        if fmt.is_string:
            # Map the canonical strftime format back to the T-SQL name
            mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING)

            name = (mapped_fmt or "").upper()
            if name in DATEPART_ONLY_FORMATS:
                return self.func("DATEPART", name, expression.this)

            fmt_sql = self.sql(exp.Literal.string(mapped_fmt))
        else:
            fmt_sql = self.format_time(expression) or self.sql(fmt)
    else:
        fmt_sql = self.sql(fmt)

    return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture"))


def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str:
    """Generate STRING_AGG(expr, sep) [WITHIN GROUP (ORDER BY ...)] for GroupConcat.

    DISTINCT is not supported by T-SQL's STRING_AGG; it is flagged as
    unsupported and stripped from the expression.
    """
    this = expression.this
    distinct = expression.find(exp.Distinct)
    if distinct:
        # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression
        self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.")
        this = distinct.pop().expressions[0]

    order = ""
    if isinstance(expression.this, exp.Order):
        if expression.this.this:
            this = expression.this.this.pop()
        # Order has a leading space
        order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})"

    separator = expression.args.get("separator") or exp.Literal.string(",")
    return f"STRING_AGG({self.format_args(this, separator)}){order}"


def _build_date_delta(
    exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None
) -> t.Callable[[t.List], E]:
    """Build a parser for DATEDIFF-style calls: (unit, start_date, end_date).

    Args:
        exp_class: The expression class to instantiate, e.g. ``exp.DateDiff``.
        unit_mapping: Optional map canonicalizing unit abbreviations
            (see DATE_DELTA_INTERVAL).
    """

    def _builder(args: t.List) -> E:
        unit = seq_get(args, 0)
        if unit and unit_mapping:
            unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name))

        start_date = seq_get(args, 1)
        if start_date and start_date.is_number:
            # Numeric types are valid DATETIME values
            if start_date.is_int:
                # Integer N means N days after the 1900-01-01 epoch
                adds = DEFAULT_START_DATE + datetime.timedelta(days=start_date.to_py())
                start_date = exp.Literal.string(adds.strftime("%F"))
            else:
                # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs.
                # This is not a problem when generating T-SQL code, it is when transpiling to other dialects.
                return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit)

        return exp_class(
            this=exp.TimeStrToTime(this=seq_get(args, 2)),
            expression=exp.TimeStrToTime(this=start_date),
            unit=unit,
        )

    return _builder


def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        # Imported lazily to avoid a circular dependency with the optimizer
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression


# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts:
    """Parse DATETIMEFROMPARTS(year, month, day, hour, min, sec, ms)."""
    return exp.TimestampFromParts(
        year=seq_get(args, 0),
        month=seq_get(args, 1),
        day=seq_get(args, 2),
        hour=seq_get(args, 3),
        min=seq_get(args, 4),
        sec=seq_get(args, 5),
        milli=seq_get(args, 6),
    )
https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax 319def _build_timefromparts(args: t.List) -> exp.TimeFromParts: 320 return exp.TimeFromParts( 321 hour=seq_get(args, 0), 322 min=seq_get(args, 1), 323 sec=seq_get(args, 2), 324 fractions=seq_get(args, 3), 325 precision=seq_get(args, 4), 326 ) 327 328 329def _build_with_arg_as_text( 330 klass: t.Type[exp.Expression], 331) -> t.Callable[[t.List[exp.Expression]], exp.Expression]: 332 def _parse(args: t.List[exp.Expression]) -> exp.Expression: 333 this = seq_get(args, 0) 334 335 if this and not this.is_string: 336 this = exp.cast(this, exp.DataType.Type.TEXT) 337 338 expression = seq_get(args, 1) 339 kwargs = {"this": this} 340 341 if expression: 342 kwargs["expression"] = expression 343 344 return klass(**kwargs) 345 346 return _parse 347 348 349# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16 350def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous: 351 # PARSENAME(...) will be stored into exp.SplitPart if: 352 # - All args are literals 353 # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL) 354 if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args): 355 this = args[0] 356 part_index = args[1] 357 split_count = len(this.name.split(".")) 358 if split_count <= 4: 359 return exp.SplitPart( 360 this=this, 361 delimiter=exp.Literal.string("."), 362 part_index=exp.Literal.number(split_count + 1 - part_index.to_py()), 363 ) 364 365 return exp.Anonymous(this="PARSENAME", expressions=args) 366 367 368def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract: 369 if len(args) == 1: 370 # The default value for path is '$'. As a result, if you don't provide a 371 # value for path, JSON_QUERY returns the input expression. 
372 args.append(exp.Literal.string("$")) 373 374 return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect) 375 376 377def _json_extract_sql( 378 self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar 379) -> str: 380 json_query = self.func("JSON_QUERY", expression.this, expression.expression) 381 json_value = self.func("JSON_VALUE", expression.this, expression.expression) 382 return self.func("ISNULL", json_query, json_value) 383 384 385def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime): 386 sql = timestrtotime_sql(self, expression) 387 if expression.args.get("zone"): 388 # If there is a timezone, produce an expression like: 389 # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC' 390 # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information 391 return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC"))) 392 return sql 393 394 395def _build_datetrunc(args: t.List) -> exp.TimestampTrunc: 396 unit = seq_get(args, 0) 397 this = seq_get(args, 1) 398 399 if this and this.is_string: 400 this = exp.cast(this, exp.DataType.Type.DATETIME2) 401 402 return exp.TimestampTrunc(this=this, unit=unit) 403 404 405class TSQL(Dialect): 406 SUPPORTS_SEMI_ANTI_JOIN = False 407 LOG_BASE_FIRST = False 408 TYPED_DIVISION = True 409 CONCAT_COALESCE = True 410 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 411 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 412 413 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 414 415 TIME_MAPPING = { 416 "year": "%Y", 417 "dayofyear": "%j", 418 "day": "%d", 419 "dy": "%d", 420 "y": "%Y", 421 "week": "%W", 422 "ww": "%W", 423 "wk": "%W", 424 "hour": "%h", 425 "hh": "%I", 426 "minute": "%M", 427 "mi": "%M", 428 "n": "%M", 429 "second": "%S", 430 "ss": "%S", 431 "s": "%-S", 432 "millisecond": "%f", 433 "ms": "%f", 434 "weekday": "%w", 435 "dw": "%w", 436 "month": "%m", 437 "mm": 
"%M", 438 "m": "%-M", 439 "Y": "%Y", 440 "YYYY": "%Y", 441 "YY": "%y", 442 "MMMM": "%B", 443 "MMM": "%b", 444 "MM": "%m", 445 "M": "%-m", 446 "dddd": "%A", 447 "dd": "%d", 448 "d": "%-d", 449 "HH": "%H", 450 "H": "%-H", 451 "h": "%-I", 452 "ffffff": "%f", 453 "yyyy": "%Y", 454 "yy": "%y", 455 } 456 457 CONVERT_FORMAT_MAPPING = { 458 "0": "%b %d %Y %-I:%M%p", 459 "1": "%m/%d/%y", 460 "2": "%y.%m.%d", 461 "3": "%d/%m/%y", 462 "4": "%d.%m.%y", 463 "5": "%d-%m-%y", 464 "6": "%d %b %y", 465 "7": "%b %d, %y", 466 "8": "%H:%M:%S", 467 "9": "%b %d %Y %-I:%M:%S:%f%p", 468 "10": "mm-dd-yy", 469 "11": "yy/mm/dd", 470 "12": "yymmdd", 471 "13": "%d %b %Y %H:%M:ss:%f", 472 "14": "%H:%M:%S:%f", 473 "20": "%Y-%m-%d %H:%M:%S", 474 "21": "%Y-%m-%d %H:%M:%S.%f", 475 "22": "%m/%d/%y %-I:%M:%S %p", 476 "23": "%Y-%m-%d", 477 "24": "%H:%M:%S", 478 "25": "%Y-%m-%d %H:%M:%S.%f", 479 "100": "%b %d %Y %-I:%M%p", 480 "101": "%m/%d/%Y", 481 "102": "%Y.%m.%d", 482 "103": "%d/%m/%Y", 483 "104": "%d.%m.%Y", 484 "105": "%d-%m-%Y", 485 "106": "%d %b %Y", 486 "107": "%b %d, %Y", 487 "108": "%H:%M:%S", 488 "109": "%b %d %Y %-I:%M:%S:%f%p", 489 "110": "%m-%d-%Y", 490 "111": "%Y/%m/%d", 491 "112": "%Y%m%d", 492 "113": "%d %b %Y %H:%M:%S:%f", 493 "114": "%H:%M:%S:%f", 494 "120": "%Y-%m-%d %H:%M:%S", 495 "121": "%Y-%m-%d %H:%M:%S.%f", 496 "126": "%Y-%m-%dT%H:%M:%S.%f", 497 } 498 499 FORMAT_TIME_MAPPING = { 500 "y": "%B %Y", 501 "d": "%m/%d/%Y", 502 "H": "%-H", 503 "h": "%-I", 504 "s": "%Y-%m-%d %H:%M:%S", 505 "D": "%A,%B,%Y", 506 "f": "%A,%B,%Y %-I:%M %p", 507 "F": "%A,%B,%Y %-I:%M:%S %p", 508 "g": "%m/%d/%Y %-I:%M %p", 509 "G": "%m/%d/%Y %-I:%M:%S %p", 510 "M": "%B %-d", 511 "m": "%B %-d", 512 "O": "%Y-%m-%dT%H:%M:%S", 513 "u": "%Y-%M-%D %H:%M:%S%z", 514 "U": "%A, %B %D, %Y %H:%M:%S%z", 515 "T": "%-I:%M:%S %p", 516 "t": "%-I:%M", 517 "Y": "%a %Y", 518 } 519 520 class Tokenizer(tokens.Tokenizer): 521 IDENTIFIERS = [("[", "]"), '"'] 522 QUOTES = ["'", '"'] 523 HEX_STRINGS = [("0x", ""), ("0X", "")] 524 
VAR_SINGLE_TOKENS = {"@", "$", "#"} 525 526 KEYWORDS = { 527 **tokens.Tokenizer.KEYWORDS, 528 "CLUSTERED INDEX": TokenType.INDEX, 529 "DATETIME2": TokenType.DATETIME2, 530 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 531 "DECLARE": TokenType.DECLARE, 532 "EXEC": TokenType.COMMAND, 533 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 534 "GO": TokenType.COMMAND, 535 "IMAGE": TokenType.IMAGE, 536 "MONEY": TokenType.MONEY, 537 "NONCLUSTERED INDEX": TokenType.INDEX, 538 "NTEXT": TokenType.TEXT, 539 "OPTION": TokenType.OPTION, 540 "OUTPUT": TokenType.RETURNING, 541 "PRINT": TokenType.COMMAND, 542 "PROC": TokenType.PROCEDURE, 543 "REAL": TokenType.FLOAT, 544 "ROWVERSION": TokenType.ROWVERSION, 545 "SMALLDATETIME": TokenType.SMALLDATETIME, 546 "SMALLMONEY": TokenType.SMALLMONEY, 547 "SQL_VARIANT": TokenType.VARIANT, 548 "SYSTEM_USER": TokenType.CURRENT_USER, 549 "TOP": TokenType.TOP, 550 "TIMESTAMP": TokenType.ROWVERSION, 551 "TINYINT": TokenType.UTINYINT, 552 "UNIQUEIDENTIFIER": TokenType.UUID, 553 "UPDATE STATISTICS": TokenType.COMMAND, 554 "XML": TokenType.XML, 555 } 556 KEYWORDS.pop("/*+") 557 558 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 559 560 class Parser(parser.Parser): 561 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 562 LOG_DEFAULTS_TO_LN = True 563 STRING_ALIASES = True 564 NO_PAREN_IF_COMMANDS = False 565 566 QUERY_MODIFIER_PARSERS = { 567 **parser.Parser.QUERY_MODIFIER_PARSERS, 568 TokenType.OPTION: lambda self: ("options", self._parse_options()), 569 TokenType.FOR: lambda self: ("for", self._parse_for()), 570 } 571 572 # T-SQL does not allow BEGIN to be used as an identifier 573 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 574 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 575 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 576 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 577 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - 
{TokenType.BEGIN} 578 579 FUNCTIONS = { 580 **parser.Parser.FUNCTIONS, 581 "CHARINDEX": lambda args: exp.StrPosition( 582 this=seq_get(args, 1), 583 substr=seq_get(args, 0), 584 position=seq_get(args, 2), 585 ), 586 "COUNT": lambda args: exp.Count( 587 this=seq_get(args, 0), expressions=args[1:], big_int=False 588 ), 589 "COUNT_BIG": lambda args: exp.Count( 590 this=seq_get(args, 0), expressions=args[1:], big_int=True 591 ), 592 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 593 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 594 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 595 "DATEPART": _build_formatted_time(exp.TimeToStr), 596 "DATETIMEFROMPARTS": _build_datetimefromparts, 597 "EOMONTH": _build_eomonth, 598 "FORMAT": _build_format, 599 "GETDATE": exp.CurrentTimestamp.from_arg_list, 600 "HASHBYTES": _build_hashbytes, 601 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 602 "JSON_QUERY": _build_json_query, 603 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 604 "LEN": _build_with_arg_as_text(exp.Length), 605 "LEFT": _build_with_arg_as_text(exp.Left), 606 "NEWID": exp.Uuid.from_arg_list, 607 "RIGHT": _build_with_arg_as_text(exp.Right), 608 "PARSENAME": _build_parsename, 609 "REPLICATE": exp.Repeat.from_arg_list, 610 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 611 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 612 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 613 "SUSER_NAME": exp.CurrentUser.from_arg_list, 614 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 615 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 616 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 617 "TIMEFROMPARTS": _build_timefromparts, 618 "DATETRUNC": _build_datetrunc, 619 } 620 621 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 622 623 PROCEDURE_OPTIONS = dict.fromkeys( 624 ("ENCRYPTION", "RECOMPILE", 
"SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 625 ) 626 627 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READ_ONLY"} 628 629 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 630 TokenType.TABLE, 631 *parser.Parser.TYPE_TOKENS, 632 } 633 634 STATEMENT_PARSERS = { 635 **parser.Parser.STATEMENT_PARSERS, 636 TokenType.DECLARE: lambda self: self._parse_declare(), 637 } 638 639 RANGE_PARSERS = { 640 **parser.Parser.RANGE_PARSERS, 641 TokenType.DCOLON: lambda self, this: self.expression( 642 exp.ScopeResolution, 643 this=this, 644 expression=self._parse_function() or self._parse_var(any_token=True), 645 ), 646 } 647 648 NO_PAREN_FUNCTION_PARSERS = { 649 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 650 "NEXT": lambda self: self._parse_next_value_for(), 651 } 652 653 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 654 COLUMN_OPERATORS = { 655 **parser.Parser.COLUMN_OPERATORS, 656 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 657 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 658 else self.expression(exp.ScopeResolution, this=this, expression=to), 659 } 660 661 def _parse_alter_table_set(self) -> exp.AlterSet: 662 return self._parse_wrapped(super()._parse_alter_table_set) 663 664 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 665 if self._match(TokenType.MERGE): 666 comments = self._prev_comments 667 merge = self._parse_merge() 668 merge.add_comments(comments, prepend=True) 669 return merge 670 671 return super()._parse_wrapped_select(table=table) 672 673 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 674 # We want to use _parse_types() if the first token after :: is a known type, 675 # otherwise we could parse something like x::varchar(max) into a function 676 if self._match_set(self.TYPE_TOKENS, advance=False): 677 return self._parse_types() 678 679 return self._parse_function() or 
self._parse_types() 680 681 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 682 if not self._match(TokenType.OPTION): 683 return None 684 685 def _parse_option() -> t.Optional[exp.Expression]: 686 option = self._parse_var_from_options(OPTIONS) 687 if not option: 688 return None 689 690 self._match(TokenType.EQ) 691 return self.expression( 692 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 693 ) 694 695 return self._parse_wrapped_csv(_parse_option) 696 697 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 698 this = self._parse_primary_or_var() 699 if self._match(TokenType.L_PAREN, advance=False): 700 expression = self._parse_wrapped(self._parse_string) 701 else: 702 expression = None 703 704 return exp.XMLKeyValueOption(this=this, expression=expression) 705 706 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 707 if not self._match_pair(TokenType.FOR, TokenType.XML): 708 return None 709 710 def _parse_for_xml() -> t.Optional[exp.Expression]: 711 return self.expression( 712 exp.QueryOption, 713 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 714 or self._parse_xml_key_value_option(), 715 ) 716 717 return self._parse_csv(_parse_for_xml) 718 719 def _parse_projections(self) -> t.List[exp.Expression]: 720 """ 721 T-SQL supports the syntax alias = expression in the SELECT's projection list, 722 so we transform all parsed Selects to convert their EQ projections into Aliases. 
723 724 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 725 """ 726 return [ 727 ( 728 exp.alias_(projection.expression, projection.this.this, copy=False) 729 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 730 else projection 731 ) 732 for projection in super()._parse_projections() 733 ] 734 735 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 736 """Applies to SQL Server and Azure SQL Database 737 COMMIT [ { TRAN | TRANSACTION } 738 [ transaction_name | @tran_name_variable ] ] 739 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 740 741 ROLLBACK { TRAN | TRANSACTION } 742 [ transaction_name | @tran_name_variable 743 | savepoint_name | @savepoint_variable ] 744 """ 745 rollback = self._prev.token_type == TokenType.ROLLBACK 746 747 self._match_texts(("TRAN", "TRANSACTION")) 748 this = self._parse_id_var() 749 750 if rollback: 751 return self.expression(exp.Rollback, this=this) 752 753 durability = None 754 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 755 self._match_text_seq("DELAYED_DURABILITY") 756 self._match(TokenType.EQ) 757 758 if self._match_text_seq("OFF"): 759 durability = False 760 else: 761 self._match(TokenType.ON) 762 durability = True 763 764 self._match_r_paren() 765 766 return self.expression(exp.Commit, this=this, durability=durability) 767 768 def _parse_transaction(self) -> exp.Transaction | exp.Command: 769 """Applies to SQL Server and Azure SQL Database 770 BEGIN { TRAN | TRANSACTION } 771 [ { transaction_name | @tran_name_variable } 772 [ WITH MARK [ 'description' ] ] 773 ] 774 """ 775 if self._match_texts(("TRAN", "TRANSACTION")): 776 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 777 if self._match_text_seq("WITH", "MARK"): 778 transaction.set("mark", self._parse_string()) 779 780 return transaction 781 782 return self._parse_as_command(self._prev) 783 784 def _parse_returns(self) -> 
exp.ReturnsProperty: 785 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 786 returns = super()._parse_returns() 787 returns.set("table", table) 788 return returns 789 790 def _parse_convert( 791 self, strict: bool, safe: t.Optional[bool] = None 792 ) -> t.Optional[exp.Expression]: 793 this = self._parse_types() 794 self._match(TokenType.COMMA) 795 args = [this, *self._parse_csv(self._parse_assignment)] 796 convert = exp.Convert.from_arg_list(args) 797 convert.set("safe", safe) 798 convert.set("strict", strict) 799 return convert 800 801 def _parse_column_def( 802 self, this: t.Optional[exp.Expression], computed_column: bool = True 803 ) -> t.Optional[exp.Expression]: 804 this = super()._parse_column_def(this=this, computed_column=computed_column) 805 if not this: 806 return None 807 if self._match(TokenType.EQ): 808 this.set("default", self._parse_disjunction()) 809 if self._match_texts(self.COLUMN_DEFINITION_MODES): 810 this.set("output", self._prev.text) 811 return this 812 813 def _parse_user_defined_function( 814 self, kind: t.Optional[TokenType] = None 815 ) -> t.Optional[exp.Expression]: 816 this = super()._parse_user_defined_function(kind=kind) 817 818 if ( 819 kind == TokenType.FUNCTION 820 or isinstance(this, exp.UserDefinedFunction) 821 or self._match(TokenType.ALIAS, advance=False) 822 ): 823 return this 824 825 if not self._match(TokenType.WITH, advance=False): 826 expressions = self._parse_csv(self._parse_function_parameter) 827 else: 828 expressions = None 829 830 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 831 832 def _parse_into(self) -> t.Optional[exp.Into]: 833 into = super()._parse_into() 834 835 table = isinstance(into, exp.Into) and into.find(exp.Table) 836 if isinstance(table, exp.Table): 837 table_identifier = table.this 838 if table_identifier.args.get("temporary"): 839 # Promote the temporary property from the Identifier to the Into expression 840 t.cast(exp.Into, 
into).set("temporary", True) 841 842 return into 843 844 def _parse_id_var( 845 self, 846 any_token: bool = True, 847 tokens: t.Optional[t.Collection[TokenType]] = None, 848 ) -> t.Optional[exp.Expression]: 849 is_temporary = self._match(TokenType.HASH) 850 is_global = is_temporary and self._match(TokenType.HASH) 851 852 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 853 if this: 854 if is_global: 855 this.set("global", True) 856 elif is_temporary: 857 this.set("temporary", True) 858 859 return this 860 861 def _parse_create(self) -> exp.Create | exp.Command: 862 create = super()._parse_create() 863 864 if isinstance(create, exp.Create): 865 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 866 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 867 if not create.args.get("properties"): 868 create.set("properties", exp.Properties(expressions=[])) 869 870 create.args["properties"].append("expressions", exp.TemporaryProperty()) 871 872 return create 873 874 def _parse_if(self) -> t.Optional[exp.Expression]: 875 index = self._index 876 877 if self._match_text_seq("OBJECT_ID"): 878 self._parse_wrapped_csv(self._parse_string) 879 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 880 return self._parse_drop(exists=True) 881 self._retreat(index) 882 883 return super()._parse_if() 884 885 def _parse_unique(self) -> exp.UniqueColumnConstraint: 886 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 887 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 888 else: 889 this = self._parse_schema(self._parse_id_var(any_token=False)) 890 891 return self.expression(exp.UniqueColumnConstraint, this=this) 892 893 def _parse_partition(self) -> t.Optional[exp.Partition]: 894 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 895 return None 896 897 def parse_range(): 898 low = self._parse_bitwise() 899 high = self._parse_bitwise() if self._match_text_seq("TO") 
else None 900 901 return ( 902 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 903 ) 904 905 partition = self.expression( 906 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 907 ) 908 909 self._match_r_paren() 910 911 return partition 912 913 def _parse_declare(self) -> exp.Declare | exp.Command: 914 index = self._index 915 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 916 917 if not expressions or self._curr: 918 self._retreat(index) 919 return self._parse_as_command(self._prev) 920 921 return self.expression(exp.Declare, expressions=expressions) 922 923 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 924 var = self._parse_id_var() 925 if not var: 926 return None 927 928 value = None 929 self._match(TokenType.ALIAS) 930 if self._match(TokenType.TABLE): 931 data_type = self._parse_schema() 932 else: 933 data_type = self._parse_types() 934 if self._match(TokenType.EQ): 935 value = self._parse_bitwise() 936 937 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 938 939 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 940 expression = super()._parse_alter_table_alter() 941 942 if expression is not None: 943 collation = expression.args.get("collate") 944 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 945 identifier = collation.this 946 collation.set("this", exp.Var(this=identifier.name)) 947 948 return expression 949 950 class Generator(generator.Generator): 951 LIMIT_IS_TOP = True 952 QUERY_HINTS = False 953 RETURNING_END = False 954 NVL2_SUPPORTED = False 955 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 956 LIMIT_FETCH = "FETCH" 957 COMPUTED_COLUMN_WITH_TYPE = False 958 CTE_RECURSIVE_KEYWORD_REQUIRED = False 959 ENSURE_BOOLS = True 960 NULL_ORDERING_SUPPORTED = None 961 SUPPORTS_SINGLE_ARG_CONCAT = False 962 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 963 SUPPORTS_SELECT_INTO = True 964 
        JSON_PATH_BRACKETED_KEY_SUPPORTED = False
        SUPPORTS_TO_NUMBER = False
        SET_OP_MODIFIERS = False
        COPY_PARAMS_EQ_REQUIRED = True
        PARSE_JSON_NAME = None
        EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
        ALTER_SET_WRAPPED = True
        ALTER_SET_TYPE = ""

        # Expressions whose nested CTEs are bubbled up to the top-level
        # statement (see the WITH handling in create_sql below).
        EXPRESSIONS_WITHOUT_NESTED_CTES = {
            exp.Create,
            exp.Delete,
            exp.Insert,
            exp.Intersect,
            exp.Except,
            exp.Merge,
            exp.Select,
            exp.Subquery,
            exp.Union,
            exp.Update,
        }

        SUPPORTED_JSON_PATH_PARTS = {
            exp.JSONPathKey,
            exp.JSONPathRoot,
            exp.JSONPathSubscript,
        }

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.BOOLEAN: "BIT",
            exp.DataType.Type.DATETIME2: "DATETIME2",
            exp.DataType.Type.DECIMAL: "NUMERIC",
            exp.DataType.Type.DOUBLE: "FLOAT",
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.ROWVERSION: "ROWVERSION",
            exp.DataType.Type.TEXT: "VARCHAR(MAX)",
            exp.DataType.Type.TIMESTAMP: "DATETIME2",
            exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
            exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
            exp.DataType.Type.UTINYINT: "TINYINT",
            exp.DataType.Type.VARIANT: "SQL_VARIANT",
            exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
        }

        # NCHAR/NVARCHAR are removed so they render under their own names
        # rather than the base generator's mapped names.
        TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
        TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.AnyValue: any_value_to_max_sql,
            exp.ArrayToString: rename_func("STRING_AGG"),
            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
            exp.Chr: rename_func("CHAR"),
            exp.DateAdd: date_delta_sql("DATEADD"),
            exp.DateDiff: date_delta_sql("DATEDIFF"),
            exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
            exp.CurrentDate: rename_func("GETDATE"),
            exp.CurrentTimestamp: rename_func("GETDATE"),
            exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.Extract: rename_func("DATEPART"),
            exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
            exp.GroupConcat: _string_agg_sql,
            exp.If: rename_func("IIF"),
            exp.JSONExtract: _json_extract_sql,
            exp.JSONExtractScalar: _json_extract_sql,
            exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
            exp.Ln: rename_func("LOG"),
            exp.Max: max_or_greatest,
            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
            exp.Min: min_or_least,
            exp.NumberToStr: _format_sql,
            exp.Repeat: rename_func("REPLICATE"),
            exp.CurrentSchema: rename_func("SCHEMA_NAME"),
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_distinct_on,
                    transforms.eliminate_semi_and_anti_joins,
                    transforms.eliminate_qualify,
                    transforms.unnest_generate_date_array_using_recursive_cte,
                ]
            ),
            exp.Stddev: rename_func("STDEV"),
            exp.StrPosition: lambda self, e: strposition_sql(
                self, e, func_name="CHARINDEX", supports_position=True
            ),
            exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
            exp.SHA2: lambda self, e: self.func(
                "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
            ),
            exp.TemporaryProperty: lambda self, e: "",
            exp.TimeStrToTime: _timestrtotime_sql,
            exp.TimeToStr: _format_sql,
            exp.Trim: trim_sql,
            exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
            exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
            exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
            exp.Uuid: lambda *_: "NEWID()",
            exp.DateFromParts: rename_func("DATEFROMPARTS"),
        }

        # RETURNS is rendered by returnsproperty_sql below instead.
        TRANSFORMS.pop(exp.ReturnsProperty)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def scope_resolution(self, rhs: str, scope_name: str) -> str:
            return f"{scope_name}::{rhs}"

        def select_sql(self, expression: exp.Select) -> str:
            limit = expression.args.get("limit")
            offset = expression.args.get("offset")

            if isinstance(limit, exp.Fetch) and not offset:
                # Dialects like Oracle can FETCH directly from a row set but
                # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
                offset = exp.Offset(expression=exp.Literal.number(0))
                expression.set("offset", offset)

            if offset:
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

                if isinstance(limit, exp.Limit):
                    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                    # we replace here because otherwise TOP would be generated in select_sql
                    limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            # safe=True maps to TRY_CONVERT, which returns NULL on failure
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"

        def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
            this = expression.this
            split_count = len(this.name.split("."))
            delimiter = expression.args.get("delimiter")
            part_index = expression.args.get("part_index")

            # PARSENAME only handles '.'-separated names of at most 4 parts,
            # and it counts parts from the right, hence the index flip below.
            if (
                not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
                or (delimiter and delimiter.name != ".")
                or not part_index
                or split_count > 4
            ):
                self.unsupported(
                    "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
                )
                return ""

            return self.func(
                "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
            )

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            # TIMEFROMPARTS requires all arguments, so default the optional ones to 0
            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)

        def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
            zone = expression.args.get("zone")
            if zone is not None:
                zone.pop()
                self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

            # DATETIMEFROMPARTS requires the milliseconds argument
            if expression.args.get("milli") is None:
                expression.set("milli", exp.Literal.number(0))

            return rename_func("DATETIMEFROMPARTS")(self, expression)

        def setitem_sql(self, expression: exp.SetItem) -> str:
            this = expression.this
            if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
                # T-SQL does not use '=' in SET command, except when the LHS is a variable.
                return f"{self.sql(this.left)} {self.sql(this.right)}"

            return super().setitem_sql(expression)

        def boolean_sql(self, expression: exp.Boolean) -> str:
            # In boolean-typed positions (comparisons, projections, VALUES) render
            # the BIT literals 1/0; elsewhere emit a tautology/contradiction.
            if type(expression.parent) in BIT_TYPES or isinstance(
                expression.find_ancestor(exp.Values, exp.Select), exp.Values
            ):
                return "1" if expression.this else "0"

            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            # IS TRUE/FALSE doesn't exist in T-SQL; compare against the BIT value
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            # Temporary tables are spelled with a leading '#' in T-SQL
            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            kind = expression.kind
            exists = expression.args.pop("exists", None)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            if kind == "VIEW":
                expression.this.set("catalog", None)
                with_ = expression.args.get("with")
                if ctas_expression and with_:
                    # We've already preprocessed the Create expression to bubble up any nested CTEs,
                    # but CREATE VIEW actually requires the WITH clause to come after it so we need
                    # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                    ctas_expression.set("with", with_.pop())

            table = expression.find(exp.Table)

            # Convert CTAS statement to SELECT .. INTO ..
            if kind == "TABLE" and ctas_expression:
                if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                    ctas_expression = ctas_expression.subquery()

                properties = expression.args.get("properties") or exp.Properties()
                is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

                select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
                select_into.set("into", exp.Into(this=table, temporary=is_temp))

                # CREATE TABLE ... LIKE copies only the schema, so select no rows
                if like_property:
                    select_into.limit(0, copy=False)

                sql = self.sql(select_into)
            else:
                sql = super().create_sql(expression)

            if exists:
                # IF NOT EXISTS has no direct T-SQL equivalent: emit an existence
                # check against the catalog views and EXEC the statement literal.
                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
                sql_with_ctes = self.prepend_ctes(expression, sql)
                sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
                if kind == "SCHEMA":
                    return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
                elif kind == "TABLE":
                    assert table
                    where = exp.and_(
                        exp.column("TABLE_NAME").eq(table.name),
                        exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                        exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
                    )
                    return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
                elif kind == "INDEX":
                    index = self.sql(exp.Literal.string(expression.this.text("this")))
                    return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
            elif expression.args.get("replace"):
                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

            return self.prepend_ctes(expression, sql)

        @generator.unsupported_args("unlogged", "expressions")
        def into_sql(self, expression: exp.Into) -> str:
            if expression.args.get("temporary"):
                # If the Into expression has a temporary property, push this down to the Identifier
                table = expression.find(exp.Table)
                if table and isinstance(table.this, exp.Identifier):
                    table.this.set("temporary", True)

            return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

        def count_sql(self, expression: exp.Count) -> str:
            func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
            return rename_func(func_name)(self, expression)

        def offset_sql(self, expression: exp.Offset) -> str:
            # T-SQL requires the ROWS keyword after OFFSET <n>
            return f"{super().offset_sql(expression)} ROWS"

        def version_sql(self, expression: exp.Version) -> str:
            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
            this = f"FOR {name}"
            expr = expression.expression
            kind = expression.text("kind")
            if kind in ("FROM", "BETWEEN"):
                # FROM a TO b  /  BETWEEN a AND b
                args = expr.expressions
                sep = "TO" if kind == "FROM" else "AND"
                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
            else:
                expr_sql = self.sql(expr)

            expr_sql = f" {expr_sql}" if expr_sql else ""
            return f"{this} {kind}{expr_sql}"

        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
            table = expression.args.get("table")
            table = f"{table} " if table else ""
            return f"RETURNS {table}{self.sql(expression, 'this')}"

        def returning_sql(self, expression: exp.Returning) -> str:
            # RETURNING maps to T-SQL's OUTPUT [... INTO ...] clause
            into = self.sql(expression, "into")
            into = self.seg(f"INTO {into}") if into else ""
            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

        def transaction_sql(self, expression: exp.Transaction) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            mark = self.sql(expression, "mark")
            mark = f" WITH MARK {mark}" if mark else ""
            return f"BEGIN TRANSACTION{this}{mark}"

        def commit_sql(self, expression: exp.Commit) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            durability = expression.args.get("durability")
            durability = (
                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
                if durability is not None
                else ""
            )
            return f"COMMIT TRANSACTION{this}{durability}"

        def rollback_sql(self, expression: exp.Rollback) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            return f"ROLLBACK TRANSACTION{this}"

        def identifier_sql(self, expression: exp.Identifier) -> str:
            identifier = super().identifier_sql(expression)

            # '#' marks a local temp object, '##' a global one
            if expression.args.get("global"):
                identifier = f"##{identifier}"
            elif expression.args.get("temporary"):
                identifier = f"#{identifier}"

            return identifier

        def constraint_sql(self, expression: exp.Constraint) -> str:
            this = self.sql(expression, "this")
            expressions = self.expressions(expression, flat=True, sep=" ")
            return f"CONSTRAINT {this} {expressions}"

        def length_sql(self, expression: exp.Length) -> str:
            return self._uncast_text(expression, "LEN")

        def right_sql(self, expression: exp.Right) -> str:
            return self._uncast_text(expression, "RIGHT")

        def left_sql(self, expression: exp.Left) -> str:
            return self._uncast_text(expression, "LEFT")

        def _uncast_text(self, expression: exp.Expression, name: str) -> str:
            # Drop a redundant CAST(x AS TEXT) wrapper around the first argument
            this = expression.this
            if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
                this_sql = self.sql(this, "this")
            else:
                this_sql = self.sql(this)
            expression_sql = self.sql(expression, "expression")
            return self.func(name, this_sql, expression_sql if expression_sql else None)
def partition_sql(self, expression: exp.Partition) -> str: 1370 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1371 1372 def alter_sql(self, expression: exp.Alter) -> str: 1373 action = seq_get(expression.args.get("actions") or [], 0) 1374 if isinstance(action, exp.AlterRename): 1375 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1376 return super().alter_sql(expression) 1377 1378 def drop_sql(self, expression: exp.Drop) -> str: 1379 if expression.args["kind"] == "VIEW": 1380 expression.this.set("catalog", None) 1381 return super().drop_sql(expression) 1382 1383 def options_modifier(self, expression: exp.Expression) -> str: 1384 options = self.expressions(expression, key="options") 1385 return f" OPTION{self.wrap(options)}" if options else "" 1386 1387 def dpipe_sql(self, expression: exp.DPipe) -> str: 1388 return self.sql( 1389 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1390 ) 1391 1392 def isascii_sql(self, expression: exp.IsAscii) -> str: 1393 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1394 1395 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1396 this = super().columndef_sql(expression, sep) 1397 default = self.sql(expression, "default") 1398 default = f" = {default}" if default else "" 1399 output = self.sql(expression, "output") 1400 output = f" {output}" if output else "" 1401 return f"{this}{default}{output}" 1402 1403 def coalesce_sql(self, expression: exp.Coalesce) -> str: 1404 func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE" 1405 return rename_func(func_name)(self, expression)
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        query = expression.this

        # Record projection positions rather than the nodes themselves: qualify_outputs
        # replaces each bare column with a new Alias wrapper, so the original nodes
        # would no longer be reachable through the query afterwards.
        plain_column_positions = [
            idx
            for idx, projection in enumerate(query.selects)
            if isinstance(projection, exp.Column) and not projection.alias
        ]

        qualify_outputs(query)

        # Carry each column's quoting over to the alias identifier that now wraps it
        aliased_selects = query.selects
        for idx in plain_column_positions:
            alias_node = aliased_selects[idx]
            wrapped_column = alias_node.this
            if isinstance(wrapped_column.this, exp.Identifier):
                alias_node.args["alias"].set("quoted", wrapped_column.this.quoted)

    return expression
# Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
class TSQL(Dialect):
    """Microsoft SQL Server / Azure SQL (Transact-SQL) dialect."""

    SUPPORTS_SEMI_ANTI_JOIN = False
    LOG_BASE_FIRST = False
    TYPED_DIVISION = True
    CONCAT_COALESCE = True
    # Unquoted identifiers are case-insensitive in T-SQL.
    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE
    ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False

    TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"

    # Maps T-SQL date-part names and format tokens to Python strftime-style codes.
    TIME_MAPPING = {
        "year": "%Y",
        "dayofyear": "%j",
        "day": "%d",
        "dy": "%d",
        "y": "%Y",
        "week": "%W",
        "ww": "%W",
        "wk": "%W",
        "hour": "%h",
        "hh": "%I",
        "minute": "%M",
        "mi": "%M",
        "n": "%M",
        "second": "%S",
        "ss": "%S",
        "s": "%-S",
        "millisecond": "%f",
        "ms": "%f",
        "weekday": "%w",
        "dw": "%w",
        "month": "%m",
        "mm": "%M",
        "m": "%-M",
        "Y": "%Y",
        "YYYY": "%Y",
        "YY": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dddd": "%A",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "h": "%-I",
        "ffffff": "%f",
        "yyyy": "%Y",
        "yy": "%y",
    }

    # Maps the numeric style argument of CONVERT(..., style) to strftime formats.
    # See the T-SQL CAST/CONVERT documentation for the style number table.
    CONVERT_FORMAT_MAPPING = {
        "0": "%b %d %Y %-I:%M%p",
        "1": "%m/%d/%y",
        "2": "%y.%m.%d",
        "3": "%d/%m/%y",
        "4": "%d.%m.%y",
        "5": "%d-%m-%y",
        "6": "%d %b %y",
        "7": "%b %d, %y",
        "8": "%H:%M:%S",
        "9": "%b %d %Y %-I:%M:%S:%f%p",
        # NOTE(review): styles 10-12 look like raw T-SQL tokens rather than strftime
        # codes, unlike the rest of this table — verify they are intentional.
        "10": "mm-dd-yy",
        "11": "yy/mm/dd",
        "12": "yymmdd",
        # NOTE(review): the literal "ss" here (vs %S elsewhere) looks suspicious — confirm.
        "13": "%d %b %Y %H:%M:ss:%f",
        "14": "%H:%M:%S:%f",
        "20": "%Y-%m-%d %H:%M:%S",
        "21": "%Y-%m-%d %H:%M:%S.%f",
        "22": "%m/%d/%y %-I:%M:%S %p",
        "23": "%Y-%m-%d",
        "24": "%H:%M:%S",
        "25": "%Y-%m-%d %H:%M:%S.%f",
        "100": "%b %d %Y %-I:%M%p",
        "101": "%m/%d/%Y",
        "102": "%Y.%m.%d",
        "103": "%d/%m/%Y",
        "104": "%d.%m.%Y",
        "105": "%d-%m-%Y",
        "106": "%d %b %Y",
        "107": "%b %d, %Y",
        "108": "%H:%M:%S",
        "109": "%b %d %Y %-I:%M:%S:%f%p",
        "110": "%m-%d-%Y",
        "111": "%Y/%m/%d",
        "112": "%Y%m%d",
        "113": "%d %b %Y %H:%M:%S:%f",
        "114": "%H:%M:%S:%f",
        "120": "%Y-%m-%d %H:%M:%S",
        "121": "%Y-%m-%d %H:%M:%S.%f",
        "126": "%Y-%m-%dT%H:%M:%S.%f",
    }

    # Maps .NET-style standard format specifiers (as used by FORMAT()) to strftime.
    FORMAT_TIME_MAPPING = {
        "y": "%B %Y",
        "d": "%m/%d/%Y",
        "H": "%-H",
        "h": "%-I",
        "s": "%Y-%m-%d %H:%M:%S",
        "D": "%A,%B,%Y",
        "f": "%A,%B,%Y %-I:%M %p",
        "F": "%A,%B,%Y %-I:%M:%S %p",
        "g": "%m/%d/%Y %-I:%M %p",
        "G": "%m/%d/%Y %-I:%M:%S %p",
        "M": "%B %-d",
        "m": "%B %-d",
        "O": "%Y-%m-%dT%H:%M:%S",
        # NOTE(review): %M (minute) and %D in the date position of "u"/"U" look like
        # typos for %m/%d — confirm against round-trip tests before changing.
        "u": "%Y-%M-%D %H:%M:%S%z",
        "U": "%A, %B %D, %Y %H:%M:%S%z",
        "T": "%-I:%M:%S %p",
        "t": "%-I:%M",
        "Y": "%a %Y",
    }

    class Tokenizer(tokens.Tokenizer):
        # T-SQL brackets and (by default) double quotes both delimit identifiers.
        IDENTIFIERS = [("[", "]"), '"']
        QUOTES = ["'", '"']
        # Hex literals are written 0x... with no closing delimiter.
        HEX_STRINGS = [("0x", ""), ("0X", "")]
        VAR_SINGLE_TOKENS = {"@", "$", "#"}

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "CLUSTERED INDEX": TokenType.INDEX,
            "DATETIME2": TokenType.DATETIME2,
            "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
            "DECLARE": TokenType.DECLARE,
            "EXEC": TokenType.COMMAND,
            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
            "GO": TokenType.COMMAND,
            "IMAGE": TokenType.IMAGE,
            "MONEY": TokenType.MONEY,
            "NONCLUSTERED INDEX": TokenType.INDEX,
            "NTEXT": TokenType.TEXT,
            "OPTION": TokenType.OPTION,
            "OUTPUT": TokenType.RETURNING,
            "PRINT": TokenType.COMMAND,
            "PROC": TokenType.PROCEDURE,
            "REAL": TokenType.FLOAT,
            "ROWVERSION": TokenType.ROWVERSION,
            "SMALLDATETIME": TokenType.SMALLDATETIME,
            "SMALLMONEY": TokenType.SMALLMONEY,
            "SQL_VARIANT": TokenType.VARIANT,
            "SYSTEM_USER": TokenType.CURRENT_USER,
            "TOP": TokenType.TOP,
            # T-SQL's TIMESTAMP is a row-version counter, not a date/time type.
            "TIMESTAMP": TokenType.ROWVERSION,
            "TINYINT": TokenType.UTINYINT,
            "UNIQUEIDENTIFIER": TokenType.UUID,
            "UPDATE STATISTICS": TokenType.COMMAND,
            "XML": TokenType.XML,
        }
        # No /*+ ... */ optimizer-hint comments in T-SQL.
        KEYWORDS.pop("/*+")

        COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}

    class Parser(parser.Parser):
        SET_REQUIRES_ASSIGNMENT_DELIMITER = False
        LOG_DEFAULTS_TO_LN = True
STRING_ALIASES = True 565 NO_PAREN_IF_COMMANDS = False 566 567 QUERY_MODIFIER_PARSERS = { 568 **parser.Parser.QUERY_MODIFIER_PARSERS, 569 TokenType.OPTION: lambda self: ("options", self._parse_options()), 570 TokenType.FOR: lambda self: ("for", self._parse_for()), 571 } 572 573 # T-SQL does not allow BEGIN to be used as an identifier 574 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 575 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 576 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 577 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 578 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 579 580 FUNCTIONS = { 581 **parser.Parser.FUNCTIONS, 582 "CHARINDEX": lambda args: exp.StrPosition( 583 this=seq_get(args, 1), 584 substr=seq_get(args, 0), 585 position=seq_get(args, 2), 586 ), 587 "COUNT": lambda args: exp.Count( 588 this=seq_get(args, 0), expressions=args[1:], big_int=False 589 ), 590 "COUNT_BIG": lambda args: exp.Count( 591 this=seq_get(args, 0), expressions=args[1:], big_int=True 592 ), 593 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 594 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 595 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 596 "DATEPART": _build_formatted_time(exp.TimeToStr), 597 "DATETIMEFROMPARTS": _build_datetimefromparts, 598 "EOMONTH": _build_eomonth, 599 "FORMAT": _build_format, 600 "GETDATE": exp.CurrentTimestamp.from_arg_list, 601 "HASHBYTES": _build_hashbytes, 602 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 603 "JSON_QUERY": _build_json_query, 604 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 605 "LEN": _build_with_arg_as_text(exp.Length), 606 "LEFT": _build_with_arg_as_text(exp.Left), 607 "NEWID": exp.Uuid.from_arg_list, 608 "RIGHT": _build_with_arg_as_text(exp.Right), 609 
"PARSENAME": _build_parsename, 610 "REPLICATE": exp.Repeat.from_arg_list, 611 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 612 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 613 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 614 "SUSER_NAME": exp.CurrentUser.from_arg_list, 615 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 616 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 617 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 618 "TIMEFROMPARTS": _build_timefromparts, 619 "DATETRUNC": _build_datetrunc, 620 } 621 622 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 623 624 PROCEDURE_OPTIONS = dict.fromkeys( 625 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 626 ) 627 628 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READ_ONLY"} 629 630 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 631 TokenType.TABLE, 632 *parser.Parser.TYPE_TOKENS, 633 } 634 635 STATEMENT_PARSERS = { 636 **parser.Parser.STATEMENT_PARSERS, 637 TokenType.DECLARE: lambda self: self._parse_declare(), 638 } 639 640 RANGE_PARSERS = { 641 **parser.Parser.RANGE_PARSERS, 642 TokenType.DCOLON: lambda self, this: self.expression( 643 exp.ScopeResolution, 644 this=this, 645 expression=self._parse_function() or self._parse_var(any_token=True), 646 ), 647 } 648 649 NO_PAREN_FUNCTION_PARSERS = { 650 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 651 "NEXT": lambda self: self._parse_next_value_for(), 652 } 653 654 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 655 COLUMN_OPERATORS = { 656 **parser.Parser.COLUMN_OPERATORS, 657 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 658 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 659 else self.expression(exp.ScopeResolution, this=this, expression=to), 660 } 661 662 def _parse_alter_table_set(self) -> exp.AlterSet: 663 return 
self._parse_wrapped(super()._parse_alter_table_set) 664 665 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 666 if self._match(TokenType.MERGE): 667 comments = self._prev_comments 668 merge = self._parse_merge() 669 merge.add_comments(comments, prepend=True) 670 return merge 671 672 return super()._parse_wrapped_select(table=table) 673 674 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 675 # We want to use _parse_types() if the first token after :: is a known type, 676 # otherwise we could parse something like x::varchar(max) into a function 677 if self._match_set(self.TYPE_TOKENS, advance=False): 678 return self._parse_types() 679 680 return self._parse_function() or self._parse_types() 681 682 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 683 if not self._match(TokenType.OPTION): 684 return None 685 686 def _parse_option() -> t.Optional[exp.Expression]: 687 option = self._parse_var_from_options(OPTIONS) 688 if not option: 689 return None 690 691 self._match(TokenType.EQ) 692 return self.expression( 693 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 694 ) 695 696 return self._parse_wrapped_csv(_parse_option) 697 698 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 699 this = self._parse_primary_or_var() 700 if self._match(TokenType.L_PAREN, advance=False): 701 expression = self._parse_wrapped(self._parse_string) 702 else: 703 expression = None 704 705 return exp.XMLKeyValueOption(this=this, expression=expression) 706 707 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 708 if not self._match_pair(TokenType.FOR, TokenType.XML): 709 return None 710 711 def _parse_for_xml() -> t.Optional[exp.Expression]: 712 return self.expression( 713 exp.QueryOption, 714 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 715 or self._parse_xml_key_value_option(), 716 ) 717 718 return self._parse_csv(_parse_for_xml) 719 720 def _parse_projections(self) -> 
t.List[exp.Expression]: 721 """ 722 T-SQL supports the syntax alias = expression in the SELECT's projection list, 723 so we transform all parsed Selects to convert their EQ projections into Aliases. 724 725 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 726 """ 727 return [ 728 ( 729 exp.alias_(projection.expression, projection.this.this, copy=False) 730 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 731 else projection 732 ) 733 for projection in super()._parse_projections() 734 ] 735 736 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 737 """Applies to SQL Server and Azure SQL Database 738 COMMIT [ { TRAN | TRANSACTION } 739 [ transaction_name | @tran_name_variable ] ] 740 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 741 742 ROLLBACK { TRAN | TRANSACTION } 743 [ transaction_name | @tran_name_variable 744 | savepoint_name | @savepoint_variable ] 745 """ 746 rollback = self._prev.token_type == TokenType.ROLLBACK 747 748 self._match_texts(("TRAN", "TRANSACTION")) 749 this = self._parse_id_var() 750 751 if rollback: 752 return self.expression(exp.Rollback, this=this) 753 754 durability = None 755 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 756 self._match_text_seq("DELAYED_DURABILITY") 757 self._match(TokenType.EQ) 758 759 if self._match_text_seq("OFF"): 760 durability = False 761 else: 762 self._match(TokenType.ON) 763 durability = True 764 765 self._match_r_paren() 766 767 return self.expression(exp.Commit, this=this, durability=durability) 768 769 def _parse_transaction(self) -> exp.Transaction | exp.Command: 770 """Applies to SQL Server and Azure SQL Database 771 BEGIN { TRAN | TRANSACTION } 772 [ { transaction_name | @tran_name_variable } 773 [ WITH MARK [ 'description' ] ] 774 ] 775 """ 776 if self._match_texts(("TRAN", "TRANSACTION")): 777 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 778 if 
self._match_text_seq("WITH", "MARK"): 779 transaction.set("mark", self._parse_string()) 780 781 return transaction 782 783 return self._parse_as_command(self._prev) 784 785 def _parse_returns(self) -> exp.ReturnsProperty: 786 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 787 returns = super()._parse_returns() 788 returns.set("table", table) 789 return returns 790 791 def _parse_convert( 792 self, strict: bool, safe: t.Optional[bool] = None 793 ) -> t.Optional[exp.Expression]: 794 this = self._parse_types() 795 self._match(TokenType.COMMA) 796 args = [this, *self._parse_csv(self._parse_assignment)] 797 convert = exp.Convert.from_arg_list(args) 798 convert.set("safe", safe) 799 convert.set("strict", strict) 800 return convert 801 802 def _parse_column_def( 803 self, this: t.Optional[exp.Expression], computed_column: bool = True 804 ) -> t.Optional[exp.Expression]: 805 this = super()._parse_column_def(this=this, computed_column=computed_column) 806 if not this: 807 return None 808 if self._match(TokenType.EQ): 809 this.set("default", self._parse_disjunction()) 810 if self._match_texts(self.COLUMN_DEFINITION_MODES): 811 this.set("output", self._prev.text) 812 return this 813 814 def _parse_user_defined_function( 815 self, kind: t.Optional[TokenType] = None 816 ) -> t.Optional[exp.Expression]: 817 this = super()._parse_user_defined_function(kind=kind) 818 819 if ( 820 kind == TokenType.FUNCTION 821 or isinstance(this, exp.UserDefinedFunction) 822 or self._match(TokenType.ALIAS, advance=False) 823 ): 824 return this 825 826 if not self._match(TokenType.WITH, advance=False): 827 expressions = self._parse_csv(self._parse_function_parameter) 828 else: 829 expressions = None 830 831 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 832 833 def _parse_into(self) -> t.Optional[exp.Into]: 834 into = super()._parse_into() 835 836 table = isinstance(into, exp.Into) and into.find(exp.Table) 837 if isinstance(table, 
exp.Table): 838 table_identifier = table.this 839 if table_identifier.args.get("temporary"): 840 # Promote the temporary property from the Identifier to the Into expression 841 t.cast(exp.Into, into).set("temporary", True) 842 843 return into 844 845 def _parse_id_var( 846 self, 847 any_token: bool = True, 848 tokens: t.Optional[t.Collection[TokenType]] = None, 849 ) -> t.Optional[exp.Expression]: 850 is_temporary = self._match(TokenType.HASH) 851 is_global = is_temporary and self._match(TokenType.HASH) 852 853 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 854 if this: 855 if is_global: 856 this.set("global", True) 857 elif is_temporary: 858 this.set("temporary", True) 859 860 return this 861 862 def _parse_create(self) -> exp.Create | exp.Command: 863 create = super()._parse_create() 864 865 if isinstance(create, exp.Create): 866 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 867 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 868 if not create.args.get("properties"): 869 create.set("properties", exp.Properties(expressions=[])) 870 871 create.args["properties"].append("expressions", exp.TemporaryProperty()) 872 873 return create 874 875 def _parse_if(self) -> t.Optional[exp.Expression]: 876 index = self._index 877 878 if self._match_text_seq("OBJECT_ID"): 879 self._parse_wrapped_csv(self._parse_string) 880 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 881 return self._parse_drop(exists=True) 882 self._retreat(index) 883 884 return super()._parse_if() 885 886 def _parse_unique(self) -> exp.UniqueColumnConstraint: 887 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 888 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 889 else: 890 this = self._parse_schema(self._parse_id_var(any_token=False)) 891 892 return self.expression(exp.UniqueColumnConstraint, this=this) 893 894 def _parse_partition(self) -> t.Optional[exp.Partition]: 895 
if not self._match_text_seq("WITH", "(", "PARTITIONS"): 896 return None 897 898 def parse_range(): 899 low = self._parse_bitwise() 900 high = self._parse_bitwise() if self._match_text_seq("TO") else None 901 902 return ( 903 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 904 ) 905 906 partition = self.expression( 907 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 908 ) 909 910 self._match_r_paren() 911 912 return partition 913 914 def _parse_declare(self) -> exp.Declare | exp.Command: 915 index = self._index 916 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 917 918 if not expressions or self._curr: 919 self._retreat(index) 920 return self._parse_as_command(self._prev) 921 922 return self.expression(exp.Declare, expressions=expressions) 923 924 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 925 var = self._parse_id_var() 926 if not var: 927 return None 928 929 value = None 930 self._match(TokenType.ALIAS) 931 if self._match(TokenType.TABLE): 932 data_type = self._parse_schema() 933 else: 934 data_type = self._parse_types() 935 if self._match(TokenType.EQ): 936 value = self._parse_bitwise() 937 938 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 939 940 def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: 941 expression = super()._parse_alter_table_alter() 942 943 if expression is not None: 944 collation = expression.args.get("collate") 945 if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier): 946 identifier = collation.this 947 collation.set("this", exp.Var(this=identifier.name)) 948 949 return expression 950 951 class Generator(generator.Generator): 952 LIMIT_IS_TOP = True 953 QUERY_HINTS = False 954 RETURNING_END = False 955 NVL2_SUPPORTED = False 956 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 957 LIMIT_FETCH = "FETCH" 958 COMPUTED_COLUMN_WITH_TYPE = False 959 CTE_RECURSIVE_KEYWORD_REQUIRED 
= False 960 ENSURE_BOOLS = True 961 NULL_ORDERING_SUPPORTED = None 962 SUPPORTS_SINGLE_ARG_CONCAT = False 963 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 964 SUPPORTS_SELECT_INTO = True 965 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 966 SUPPORTS_TO_NUMBER = False 967 SET_OP_MODIFIERS = False 968 COPY_PARAMS_EQ_REQUIRED = True 969 PARSE_JSON_NAME = None 970 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 971 ALTER_SET_WRAPPED = True 972 ALTER_SET_TYPE = "" 973 974 EXPRESSIONS_WITHOUT_NESTED_CTES = { 975 exp.Create, 976 exp.Delete, 977 exp.Insert, 978 exp.Intersect, 979 exp.Except, 980 exp.Merge, 981 exp.Select, 982 exp.Subquery, 983 exp.Union, 984 exp.Update, 985 } 986 987 SUPPORTED_JSON_PATH_PARTS = { 988 exp.JSONPathKey, 989 exp.JSONPathRoot, 990 exp.JSONPathSubscript, 991 } 992 993 TYPE_MAPPING = { 994 **generator.Generator.TYPE_MAPPING, 995 exp.DataType.Type.BOOLEAN: "BIT", 996 exp.DataType.Type.DATETIME2: "DATETIME2", 997 exp.DataType.Type.DECIMAL: "NUMERIC", 998 exp.DataType.Type.DOUBLE: "FLOAT", 999 exp.DataType.Type.INT: "INTEGER", 1000 exp.DataType.Type.ROWVERSION: "ROWVERSION", 1001 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 1002 exp.DataType.Type.TIMESTAMP: "DATETIME2", 1003 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 1004 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 1005 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 1006 exp.DataType.Type.UTINYINT: "TINYINT", 1007 exp.DataType.Type.VARIANT: "SQL_VARIANT", 1008 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 1009 } 1010 1011 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 1012 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 1013 1014 TRANSFORMS = { 1015 **generator.Generator.TRANSFORMS, 1016 exp.AnyValue: any_value_to_max_sql, 1017 exp.ArrayToString: rename_func("STRING_AGG"), 1018 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 1019 exp.Chr: rename_func("CHAR"), 1020 exp.DateAdd: date_delta_sql("DATEADD"), 1021 exp.DateDiff: date_delta_sql("DATEDIFF"), 1022 exp.CTE: 
transforms.preprocess([qualify_derived_table_outputs]), 1023 exp.CurrentDate: rename_func("GETDATE"), 1024 exp.CurrentTimestamp: rename_func("GETDATE"), 1025 exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"), 1026 exp.DateStrToDate: datestrtodate_sql, 1027 exp.Extract: rename_func("DATEPART"), 1028 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 1029 exp.GroupConcat: _string_agg_sql, 1030 exp.If: rename_func("IIF"), 1031 exp.JSONExtract: _json_extract_sql, 1032 exp.JSONExtractScalar: _json_extract_sql, 1033 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1034 exp.Ln: rename_func("LOG"), 1035 exp.Max: max_or_greatest, 1036 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 1037 exp.Min: min_or_least, 1038 exp.NumberToStr: _format_sql, 1039 exp.Repeat: rename_func("REPLICATE"), 1040 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 1041 exp.Select: transforms.preprocess( 1042 [ 1043 transforms.eliminate_distinct_on, 1044 transforms.eliminate_semi_and_anti_joins, 1045 transforms.eliminate_qualify, 1046 transforms.unnest_generate_date_array_using_recursive_cte, 1047 ] 1048 ), 1049 exp.Stddev: rename_func("STDEV"), 1050 exp.StrPosition: lambda self, e: strposition_sql( 1051 self, e, func_name="CHARINDEX", supports_position=True 1052 ), 1053 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 1054 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 1055 exp.SHA2: lambda self, e: self.func( 1056 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 1057 ), 1058 exp.TemporaryProperty: lambda self, e: "", 1059 exp.TimeStrToTime: _timestrtotime_sql, 1060 exp.TimeToStr: _format_sql, 1061 exp.Trim: trim_sql, 1062 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 1063 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 1064 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 1065 exp.Uuid: lambda *_: 
"NEWID()", 1066 exp.DateFromParts: rename_func("DATEFROMPARTS"), 1067 } 1068 1069 TRANSFORMS.pop(exp.ReturnsProperty) 1070 1071 PROPERTIES_LOCATION = { 1072 **generator.Generator.PROPERTIES_LOCATION, 1073 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 1074 } 1075 1076 def scope_resolution(self, rhs: str, scope_name: str) -> str: 1077 return f"{scope_name}::{rhs}" 1078 1079 def select_sql(self, expression: exp.Select) -> str: 1080 limit = expression.args.get("limit") 1081 offset = expression.args.get("offset") 1082 1083 if isinstance(limit, exp.Fetch) and not offset: 1084 # Dialects like Oracle can FETCH directly from a row set but 1085 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1086 offset = exp.Offset(expression=exp.Literal.number(0)) 1087 expression.set("offset", offset) 1088 1089 if offset: 1090 if not expression.args.get("order"): 1091 # ORDER BY is required in order to use OFFSET in a query, so we use 1092 # a noop order by, since we don't really care about the order. 
1093 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1094 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1095 1096 if isinstance(limit, exp.Limit): 1097 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1098 # we replace here because otherwise TOP would be generated in select_sql 1099 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1100 1101 return super().select_sql(expression) 1102 1103 def convert_sql(self, expression: exp.Convert) -> str: 1104 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1105 return self.func( 1106 name, expression.this, expression.expression, expression.args.get("style") 1107 ) 1108 1109 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1110 option = self.sql(expression, "this") 1111 value = self.sql(expression, "expression") 1112 if value: 1113 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1114 return f"{option} {optional_equal_sign}{value}" 1115 return option 1116 1117 def lateral_op(self, expression: exp.Lateral) -> str: 1118 cross_apply = expression.args.get("cross_apply") 1119 if cross_apply is True: 1120 return "CROSS APPLY" 1121 if cross_apply is False: 1122 return "OUTER APPLY" 1123 1124 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1125 self.unsupported("LATERAL clause is not supported.") 1126 return "LATERAL" 1127 1128 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1129 this = expression.this 1130 split_count = len(this.name.split(".")) 1131 delimiter = expression.args.get("delimiter") 1132 part_index = expression.args.get("part_index") 1133 1134 if ( 1135 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1136 or (delimiter and delimiter.name != ".") 1137 or not part_index 1138 or split_count > 4 1139 ): 1140 self.unsupported( 1141 "SPLIT_PART can be transpiled to PARSENAME only for '.' 
delimiter and literal values" 1142 ) 1143 return "" 1144 1145 return self.func( 1146 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1147 ) 1148 1149 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1150 nano = expression.args.get("nano") 1151 if nano is not None: 1152 nano.pop() 1153 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1154 1155 if expression.args.get("fractions") is None: 1156 expression.set("fractions", exp.Literal.number(0)) 1157 if expression.args.get("precision") is None: 1158 expression.set("precision", exp.Literal.number(0)) 1159 1160 return rename_func("TIMEFROMPARTS")(self, expression) 1161 1162 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1163 zone = expression.args.get("zone") 1164 if zone is not None: 1165 zone.pop() 1166 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1167 1168 nano = expression.args.get("nano") 1169 if nano is not None: 1170 nano.pop() 1171 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1172 1173 if expression.args.get("milli") is None: 1174 expression.set("milli", exp.Literal.number(0)) 1175 1176 return rename_func("DATETIMEFROMPARTS")(self, expression) 1177 1178 def setitem_sql(self, expression: exp.SetItem) -> str: 1179 this = expression.this 1180 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1181 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 
return f"{self.sql(this.left)} {self.sql(this.right)}"

        return super().setitem_sql(expression)

    def boolean_sql(self, expression: exp.Boolean) -> str:
        """Render a boolean as a bit literal or as a tautological comparison.

        1/0 is emitted when the parent node is one of BIT_TYPES or the boolean sits
        inside a VALUES clause (i.e. a value position); otherwise a predicate form
        (1 = 1)/(1 = 0) is required.
        """
        if type(expression.parent) in BIT_TYPES or isinstance(
            expression.find_ancestor(exp.Values, exp.Select), exp.Values
        ):
            return "1" if expression.this else "0"

        return "(1 = 1)" if expression.this else "(1 = 0)"

    def is_sql(self, expression: exp.Is) -> str:
        # IS TRUE / IS FALSE is not valid T-SQL; compare against the bit literal instead.
        if isinstance(expression.expression, exp.Boolean):
            return self.binary(expression, "=")
        return self.binary(expression, "IS")

    def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
        """Prefix the created object's name with '#' when it carries a TemporaryProperty."""
        sql = self.sql(expression, "this")
        properties = expression.args.get("properties")

        if sql[:1] != "#" and any(
            isinstance(prop, exp.TemporaryProperty)
            for prop in (properties.expressions if properties else [])
        ):
            # Keep the '#' inside the brackets for quoted names: [tbl] -> [#tbl]
            sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

        return sql

    def create_sql(self, expression: exp.Create) -> str:
        """Render CREATE statements, working around T-SQL limitations:

        - CTAS has no direct syntax; it becomes SELECT * INTO <table> FROM (<query>) AS temp
        - IF NOT EXISTS is emulated by probing the catalog and EXEC'ing the DDL as a string
        - CREATE OR REPLACE is rewritten to CREATE OR ALTER
        """
        kind = expression.kind
        exists = expression.args.pop("exists", None)

        like_property = expression.find(exp.LikeProperty)
        if like_property:
            ctas_expression = like_property.this
        else:
            ctas_expression = expression.expression

        if kind == "VIEW":
            # CREATE VIEW does not accept a catalog-qualified name.
            expression.this.set("catalog", None)
            with_ = expression.args.get("with")
            if ctas_expression and with_:
                # We've already preprocessed the Create expression to bubble up any nested CTEs,
                # but CREATE VIEW actually requires the WITH clause to come after it so we need
                # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                ctas_expression.set("with", with_.pop())

        table = expression.find(exp.Table)

        # Convert CTAS statement to SELECT .. INTO ..
        if kind == "TABLE" and ctas_expression:
            if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                ctas_expression = ctas_expression.subquery()

            properties = expression.args.get("properties") or exp.Properties()
            is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

            select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
            select_into.set("into", exp.Into(this=table, temporary=is_temp))

            if like_property:
                # LIKE copies only the schema, so select zero rows from the source.
                select_into.limit(0, copy=False)

            sql = self.sql(select_into)
        else:
            sql = super().create_sql(expression)

        if exists:
            # Emulate IF NOT EXISTS: probe the system catalog, then EXEC the DDL string.
            identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
            sql_with_ctes = self.prepend_ctes(expression, sql)
            sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
            if kind == "SCHEMA":
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
            elif kind == "TABLE":
                assert table
                where = exp.and_(
                    exp.column("TABLE_NAME").eq(table.name),
                    exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                    exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
                )
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
            elif kind == "INDEX":
                index = self.sql(exp.Literal.string(expression.this.text("this")))
                return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
        elif expression.args.get("replace"):
            sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

        return self.prepend_ctes(expression, sql)

    @generator.unsupported_args("unlogged", "expressions")
    def into_sql(self, expression: exp.Into) -> str:
        """Render the INTO clause of a T-SQL SELECT ... INTO statement."""
        if expression.args.get("temporary"):
            # If the Into expression has a temporary property, push this down to the Identifier
            table = expression.find(exp.Table)
            if table and isinstance(table.this, exp.Identifier):
                table.this.set("temporary", True)

        return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

    def count_sql(self, expression: exp.Count) -> str:
        # Preserve round-trips: nodes parsed from COUNT_BIG regenerate as COUNT_BIG.
        func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
        return rename_func(func_name)(self, expression)

    def offset_sql(self, expression: exp.Offset) -> str:
        # T-SQL's OFFSET clause requires a trailing ROWS keyword.
        return f"{super().offset_sql(expression)} ROWS"

    def version_sql(self, expression: exp.Version) -> str:
        """Render a temporal clause, e.g. FOR SYSTEM_TIME AS OF ... / FROM ... TO ...."""
        name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
        this = f"FOR {name}"
        expr = expression.expression
        kind = expression.text("kind")
        if kind in ("FROM", "BETWEEN"):
            # Both kinds take two boundary expressions; only the separator keyword differs.
            args = expr.expressions
            sep = "TO" if kind == "FROM" else "AND"
            expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
        else:
            expr_sql = self.sql(expr)

        expr_sql = f" {expr_sql}" if expr_sql else ""
        return f"{this} {kind}{expr_sql}"

    def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
        """Render RETURNS [<table variable> ]<type> for function definitions."""
        table = expression.args.get("table")
        table = f"{table} " if table else ""
        return f"RETURNS {table}{self.sql(expression, 'this')}"

    def returning_sql(self, expression: exp.Returning) -> str:
        # T-SQL spells RETURNING as OUTPUT ... [INTO <target>].
        into = self.sql(expression, "into")
        into = self.seg(f"INTO {into}") if into else ""
        return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

    def transaction_sql(self, expression: exp.Transaction) -> str:
        """Render BEGIN TRANSACTION [name] [WITH MARK 'description']."""
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        mark = self.sql(expression, "mark")
        mark = f" WITH MARK {mark}" if mark else ""
        return f"BEGIN TRANSACTION{this}{mark}"

    def commit_sql(self, expression: exp.Commit) -> str:
        """Render COMMIT TRANSACTION [name] [WITH (DELAYED_DURABILITY = ON|OFF)]."""
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        durability = expression.args.get("durability")
        # Tri-state: None omits the clause entirely, True -> ON, False -> OFF.
        durability = (
            f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
            if durability is not None
            else ""
        )
        return f"COMMIT TRANSACTION{this}{durability}"

    def rollback_sql(self, expression: exp.Rollback) -> str:
        """Render ROLLBACK TRANSACTION [name]."""
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"ROLLBACK TRANSACTION{this}"

    def identifier_sql(self, expression: exp.Identifier) -> str:
        """Prefix temp-table identifiers: '#' for local temps, '##' for global ones."""
        identifier = super().identifier_sql(expression)

        if expression.args.get("global"):
            identifier = f"##{identifier}"
        elif expression.args.get("temporary"):
            identifier = f"#{identifier}"

        return identifier

    def constraint_sql(self, expression: exp.Constraint) -> str:
        # Constraint parts are space-separated, not comma-separated.
        this = self.sql(expression, "this")
        expressions = self.expressions(expression, flat=True, sep=" ")
        return f"CONSTRAINT {this} {expressions}"

    def length_sql(self, expression: exp.Length) -> str:
        return self._uncast_text(expression, "LEN")

    def right_sql(self, expression: exp.Right) -> str:
        return self._uncast_text(expression, "RIGHT")

    def left_sql(self, expression: exp.Left) -> str:
        return self._uncast_text(expression, "LEFT")

    def _uncast_text(self, expression: exp.Expression, name: str) -> str:
        """Render `name(...)`, dropping a cast-to-TEXT wrapper around the first argument."""
        this = expression.this
        if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
            this_sql = self.sql(this, "this")
        else:
            this_sql = self.sql(this)
        expression_sql = self.sql(expression, "expression")
        return self.func(name, this_sql, expression_sql if expression_sql else None)

    def partition_sql(self, expression: exp.Partition) -> str:
        return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

    def alter_sql(self, expression: exp.Alter) -> str:
        action =
seq_get(expression.args.get("actions") or [], 0) 1375 if isinstance(action, exp.AlterRename): 1376 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1377 return super().alter_sql(expression) 1378 1379 def drop_sql(self, expression: exp.Drop) -> str: 1380 if expression.args["kind"] == "VIEW": 1381 expression.this.set("catalog", None) 1382 return super().drop_sql(expression) 1383 1384 def options_modifier(self, expression: exp.Expression) -> str: 1385 options = self.expressions(expression, key="options") 1386 return f" OPTION{self.wrap(options)}" if options else "" 1387 1388 def dpipe_sql(self, expression: exp.DPipe) -> str: 1389 return self.sql( 1390 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1391 ) 1392 1393 def isascii_sql(self, expression: exp.IsAscii) -> str: 1394 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1395 1396 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1397 this = super().columndef_sql(expression, sep) 1398 default = self.sql(expression, "default") 1399 default = f" = {default}" if default else "" 1400 output = self.sql(expression, "output") 1401 output = f" {output}" if output else "" 1402 return f"{this}{default}{output}" 1403 1404 def coalesce_sql(self, expression: exp.Coalesce) -> str: 1405 func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE" 1406 return rename_func(func_name)(self, expression)
Whether the base argument comes first in the `LOG` function.
Possible values: `True`, `False`, or `None` (`None` means the two-argument form of `LOG` is not supported).
Whether the behavior of `a / b` depends on the types of `a` and `b`.
`False` means `a / b` is always float division.
`True` means `a / b` is integer division if both `a` and `b` are integers.
A `NULL` argument in `CONCAT` yields `NULL` by default, but in some dialects it yields an empty string instead.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python strftime
formats.
class Tokenizer(tokens.Tokenizer):
    """T-SQL tokenizer.

    Dialect specifics: bracket-quoted identifiers ([name]), 0x-prefixed hex
    literals, and @/$/# allowed as single-character tokens inside variable
    and temp-object names.
    """

    IDENTIFIERS = [("[", "]"), '"']
    QUOTES = ["'", '"']
    HEX_STRINGS = [("0x", ""), ("0X", "")]
    VAR_SINGLE_TOKENS = {"@", "$", "#"}

    # T-SQL-specific keywords and type-name remappings on top of the base set.
    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
        "CLUSTERED INDEX": TokenType.INDEX,
        "DATETIME2": TokenType.DATETIME2,
        "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
        "DECLARE": TokenType.DECLARE,
        "EXEC": TokenType.COMMAND,
        "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
        "GO": TokenType.COMMAND,
        "IMAGE": TokenType.IMAGE,
        "MONEY": TokenType.MONEY,
        "NONCLUSTERED INDEX": TokenType.INDEX,
        "NTEXT": TokenType.TEXT,
        "OPTION": TokenType.OPTION,
        "OUTPUT": TokenType.RETURNING,
        "PRINT": TokenType.COMMAND,
        "PROC": TokenType.PROCEDURE,
        "REAL": TokenType.FLOAT,
        "ROWVERSION": TokenType.ROWVERSION,
        "SMALLDATETIME": TokenType.SMALLDATETIME,
        "SMALLMONEY": TokenType.SMALLMONEY,
        "SQL_VARIANT": TokenType.VARIANT,
        "SYSTEM_USER": TokenType.CURRENT_USER,
        "TOP": TokenType.TOP,
        "TIMESTAMP": TokenType.ROWVERSION,
        "TINYINT": TokenType.UTINYINT,
        "UNIQUEIDENTIFIER": TokenType.UUID,
        "UPDATE STATISTICS": TokenType.COMMAND,
        "XML": TokenType.XML,
    }
    # T-SQL has no Oracle-style /*+ ... */ hint comments.
    KEYWORDS.pop("/*+")

    COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- use_rs_tokenizer
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
class Parser(parser.Parser):
    """T-SQL parser.

    Extends the base parser with T-SQL syntax: OPTION/FOR XML query
    modifiers, DECLARE statements, '::' as scope resolution, '=' projection
    aliases, #/## temp-object identifiers, and the T-SQL function library
    (CHARINDEX, DATEADD, ISNULL, PARSENAME, ...).
    """

    SET_REQUIRES_ASSIGNMENT_DELIMITER = False
    LOG_DEFAULTS_TO_LN = True
    STRING_ALIASES = True
    NO_PAREN_IF_COMMANDS = False

    QUERY_MODIFIER_PARSERS = {
        **parser.Parser.QUERY_MODIFIER_PARSERS,
        TokenType.OPTION: lambda self: ("options", self._parse_options()),
        TokenType.FOR: lambda self: ("for", self._parse_for()),
    }

    # T-SQL does not allow BEGIN to be used as an identifier
    ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN}
    ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN}
    TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
    COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
    UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN}

    # T-SQL function names mapped onto canonical sqlglot expressions.
    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,
        # CHARINDEX(substr, string[, position]) -> StrPosition(string, substr)
        "CHARINDEX": lambda args: exp.StrPosition(
            this=seq_get(args, 1),
            substr=seq_get(args, 0),
            position=seq_get(args, 2),
        ),
        "COUNT": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=False
        ),
        "COUNT_BIG": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=True
        ),
        "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
        "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
        "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
        "DATEPART": _build_formatted_time(exp.TimeToStr),
        "DATETIMEFROMPARTS": _build_datetimefromparts,
        "EOMONTH": _build_eomonth,
        "FORMAT": _build_format,
        "GETDATE": exp.CurrentTimestamp.from_arg_list,
        "HASHBYTES": _build_hashbytes,
        "ISNULL": lambda args: build_coalesce(args=args, is_null=True),
        "JSON_QUERY": _build_json_query,
        "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
        "LEN": _build_with_arg_as_text(exp.Length),
        "LEFT": _build_with_arg_as_text(exp.Left),
        "NEWID": exp.Uuid.from_arg_list,
        "RIGHT": _build_with_arg_as_text(exp.Right),
        "PARSENAME": _build_parsename,
        "REPLICATE": exp.Repeat.from_arg_list,
        "SCHEMA_NAME": exp.CurrentSchema.from_arg_list,
        "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
        "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
        "SUSER_NAME": exp.CurrentUser.from_arg_list,
        "SUSER_SNAME": exp.CurrentUser.from_arg_list,
        "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list,
        "SYSTEM_USER": exp.CurrentUser.from_arg_list,
        "TIMEFROMPARTS": _build_timefromparts,
        "DATETRUNC": _build_datetrunc,
    }

    JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

    PROCEDURE_OPTIONS = dict.fromkeys(
        ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple()
    )

    # Parameter/column modes accepted after a column definition.
    COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READ_ONLY"}

    RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
        TokenType.TABLE,
        *parser.Parser.TYPE_TOKENS,
    }

    STATEMENT_PARSERS = {
        **parser.Parser.STATEMENT_PARSERS,
        TokenType.DECLARE: lambda self: self._parse_declare(),
    }

    RANGE_PARSERS = {
        **parser.Parser.RANGE_PARSERS,
        TokenType.DCOLON: lambda self, this: self.expression(
            exp.ScopeResolution,
            this=this,
            expression=self._parse_function() or self._parse_var(any_token=True),
        ),
    }

    NO_PAREN_FUNCTION_PARSERS = {
        **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
        "NEXT": lambda self: self._parse_next_value_for(),
    }

    # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
    COLUMN_OPERATORS = {
        **parser.Parser.COLUMN_OPERATORS,
        TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
        if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
        else self.expression(exp.ScopeResolution, this=this, expression=to),
    }

    def _parse_alter_table_set(self) -> exp.AlterSet:
        # ALTER TABLE ... SET requires parentheses around the options in T-SQL.
        return self._parse_wrapped(super()._parse_alter_table_set)

    def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]:
        # A parenthesized MERGE is valid where a subquery is expected.
        if self._match(TokenType.MERGE):
            comments = self._prev_comments
            merge = self._parse_merge()
            merge.add_comments(comments, prepend=True)
            return merge

        return super()._parse_wrapped_select(table=table)

    def _parse_dcolon(self) -> t.Optional[exp.Expression]:
        # We want to use _parse_types() if the first token after :: is a known type,
        # otherwise we could parse something like x::varchar(max) into a function
        if self._match_set(self.TYPE_TOKENS, advance=False):
            return self._parse_types()

        return self._parse_function() or self._parse_types()

    def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
        """Parse a trailing OPTION(<hint> [= <value>], ...) clause."""
        if not self._match(TokenType.OPTION):
            return None

        def _parse_option() -> t.Optional[exp.Expression]:
            option = self._parse_var_from_options(OPTIONS)
            if not option:
                return None

            self._match(TokenType.EQ)
            return self.expression(
                exp.QueryOption, this=option, expression=self._parse_primary_or_var()
            )

        return self._parse_wrapped_csv(_parse_option)

    def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption:
        """Parse a FOR XML option, optionally with a parenthesized string arg."""
        this = self._parse_primary_or_var()
        if self._match(TokenType.L_PAREN, advance=False):
            expression = self._parse_wrapped(self._parse_string)
        else:
            expression = None

        return exp.XMLKeyValueOption(this=this, expression=expression)

    def _parse_for(self) -> t.Optional[t.List[exp.Expression]]:
        """Parse the FOR XML query modifier; only FOR XML is supported."""
        if not self._match_pair(TokenType.FOR, TokenType.XML):
            return None

        def _parse_for_xml() -> t.Optional[exp.Expression]:
            return self.expression(
                exp.QueryOption,
                this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False)
                or self._parse_xml_key_value_option(),
            )

        return self._parse_csv(_parse_for_xml)

    def _parse_projections(self) -> t.List[exp.Expression]:
        """
        T-SQL supports the syntax alias = expression in the SELECT's projection list,
        so we transform all parsed Selects to convert their EQ projections into Aliases.

        See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
        """
        return [
            (
                exp.alias_(projection.expression, projection.this.this, copy=False)
                if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                else projection
            )
            for projection in super()._parse_projections()
        ]

    def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
        """Applies to SQL Server and Azure SQL Database
        COMMIT [ { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable ] ]
            [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

        ROLLBACK { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable
            | savepoint_name | @savepoint_variable ]
        """
        rollback = self._prev.token_type == TokenType.ROLLBACK

        self._match_texts(("TRAN", "TRANSACTION"))
        this = self._parse_id_var()

        if rollback:
            return self.expression(exp.Rollback, this=this)

        durability = None
        if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
            self._match_text_seq("DELAYED_DURABILITY")
            self._match(TokenType.EQ)

            if self._match_text_seq("OFF"):
                durability = False
            else:
                self._match(TokenType.ON)
                durability = True

            self._match_r_paren()

        return self.expression(exp.Commit, this=this, durability=durability)

    def _parse_transaction(self) -> exp.Transaction | exp.Command:
        """Applies to SQL Server and Azure SQL Database
        BEGIN { TRAN | TRANSACTION }
          [ { transaction_name | @tran_name_variable }
          [ WITH MARK [ 'description' ] ]
          ]
        """
        if self._match_texts(("TRAN", "TRANSACTION")):
            transaction = self.expression(exp.Transaction, this=self._parse_id_var())
            if self._match_text_seq("WITH", "MARK"):
                transaction.set("mark", self._parse_string())

            return transaction

        return self._parse_as_command(self._prev)

    def _parse_returns(self) -> exp.ReturnsProperty:
        # RETURNS @var TABLE (...) — capture the table variable name first.
        table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
        returns = super()._parse_returns()
        returns.set("table", table)
        return returns

    def _parse_convert(
        self, strict: bool, safe: t.Optional[bool] = None
    ) -> t.Optional[exp.Expression]:
        """Parse CONVERT/TRY_CONVERT, whose type argument comes first."""
        this = self._parse_types()
        self._match(TokenType.COMMA)
        args = [this, *self._parse_csv(self._parse_assignment)]
        convert = exp.Convert.from_arg_list(args)
        convert.set("safe", safe)
        convert.set("strict", strict)
        return convert

    def _parse_column_def(
        self, this: t.Optional[exp.Expression], computed_column: bool = True
    ) -> t.Optional[exp.Expression]:
        """Parse a column/parameter definition plus T-SQL extras:
        '= default' and a trailing OUT/OUTPUT/READ_ONLY mode."""
        this = super()._parse_column_def(this=this, computed_column=computed_column)
        if not this:
            return None
        if self._match(TokenType.EQ):
            this.set("default", self._parse_disjunction())
        if self._match_texts(self.COLUMN_DEFINITION_MODES):
            this.set("output", self._prev.text)
        return this

    def _parse_user_defined_function(
        self, kind: t.Optional[TokenType] = None
    ) -> t.Optional[exp.Expression]:
        """Parse a UDF/procedure header; procedure parameters may appear
        unparenthesized, so parse them as a bare CSV when no WITH follows."""
        this = super()._parse_user_defined_function(kind=kind)

        if (
            kind == TokenType.FUNCTION
            or isinstance(this, exp.UserDefinedFunction)
            or self._match(TokenType.ALIAS, advance=False)
        ):
            return this

        if not self._match(TokenType.WITH, advance=False):
            expressions = self._parse_csv(self._parse_function_parameter)
        else:
            expressions = None

        return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

    def _parse_into(self) -> t.Optional[exp.Into]:
        into = super()._parse_into()

        table = isinstance(into, exp.Into) and into.find(exp.Table)
        if isinstance(table, exp.Table):
            table_identifier = table.this
            if table_identifier.args.get("temporary"):
                # Promote the temporary property from the Identifier to the Into expression
                t.cast(exp.Into, into).set("temporary", True)

        return into

    def _parse_id_var(
        self,
        any_token: bool = True,
        tokens: t.Optional[t.Collection[TokenType]] = None,
    ) -> t.Optional[exp.Expression]:
        """Parse an identifier, flagging '#name' as temporary and '##name'
        as a global temp object."""
        is_temporary = self._match(TokenType.HASH)
        is_global = is_temporary and self._match(TokenType.HASH)

        this = super()._parse_id_var(any_token=any_token, tokens=tokens)
        if this:
            if is_global:
                this.set("global", True)
            elif is_temporary:
                this.set("temporary", True)

        return this

    def _parse_create(self) -> exp.Create | exp.Command:
        """Parse CREATE; propagate a '#'-prefixed table name into an explicit
        TemporaryProperty so other dialects can round-trip it."""
        create = super()._parse_create()

        if isinstance(create, exp.Create):
            table = create.this.this if isinstance(create.this, exp.Schema) else create.this
            if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
                if not create.args.get("properties"):
                    create.set("properties", exp.Properties(expressions=[]))

                create.args["properties"].append("expressions", exp.TemporaryProperty())

        return create

    def _parse_if(self) -> t.Optional[exp.Expression]:
        """Recognize the IF OBJECT_ID(...) IS NOT NULL DROP ... idiom as a
        DROP ... IF EXISTS; otherwise fall back to a normal IF."""
        index = self._index

        if self._match_text_seq("OBJECT_ID"):
            self._parse_wrapped_csv(self._parse_string)
            if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                return self._parse_drop(exists=True)
        self._retreat(index)

        return super()._parse_if()

    def _parse_unique(self) -> exp.UniqueColumnConstraint:
        # UNIQUE may be qualified with CLUSTERED/NONCLUSTERED in T-SQL.
        if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
            this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
        else:
            this = self._parse_schema(self._parse_id_var(any_token=False))

        return self.expression(exp.UniqueColumnConstraint, this=this)

    def _parse_partition(self) -> t.Optional[exp.Partition]:
        """Parse WITH (PARTITIONS(<n> [TO <m>], ...)) partition selection."""
        if not self._match_text_seq("WITH", "(", "PARTITIONS"):
            return None

        def parse_range():
            low = self._parse_bitwise()
            high = self._parse_bitwise() if self._match_text_seq("TO") else None

            return (
                self.expression(exp.PartitionRange, this=low, expression=high) if high else low
            )

        partition = self.expression(
            exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
        )

        self._match_r_paren()

        return partition

    def _parse_declare(self) -> exp.Declare | exp.Command:
        """Parse DECLARE; fall back to an opaque Command if the variable list
        cannot be fully parsed."""
        index = self._index
        expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem))

        if not expressions or self._curr:
            self._retreat(index)
            return self._parse_as_command(self._prev)

        return self.expression(exp.Declare, expressions=expressions)

    def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
        """Parse one DECLARE item: @var [AS] <type or TABLE schema> [= value]."""
        var = self._parse_id_var()
        if not var:
            return None

        value = None
        self._match(TokenType.ALIAS)
        if self._match(TokenType.TABLE):
            data_type = self._parse_schema()
        else:
            data_type = self._parse_types()
            if self._match(TokenType.EQ):
                value = self._parse_bitwise()

        return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)

    def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]:
        expression = super()._parse_alter_table_alter()

        if expression is not None:
            collation = expression.args.get("collate")
            if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier):
                # Collation names are bare keywords, not columns; re-type them.
                identifier = collation.this
                collation.set("this", exp.Var(this=identifier.name))

        return expression
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- COLON_PLACEHOLDER_TOKENS
- ARRAY_CONSTRUCTORS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PIPE_SYNTAX_TRANSFORM_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- FUNCTION_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- WINDOW_EXCLUDE_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- NULL_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ODBC_DATETIME_LITERALS
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- ANALYZE_STYLES
- ANALYZE_EXPRESSION_PARSERS
- PARTITION_KEYWORDS
- AMBIGUOUS_ALIAS_TOKENS
- OPERATION_MODIFIERS
- RECURSIVE_CTE_SEARCH_KIND
- MODIFIABLES
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- SET_OP_MODIFIERS
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- OPTIONAL_ALIAS_TOKEN_CTE
- ALTER_RENAME_REQUIRES_COLUMN
- JOINS_HAVE_EQUAL_PRECEDENCE
- ZONE_AWARE_TIMESTAMP_CONSTRUCTOR
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- parse_set_operation
- errors
- sql
class Generator(generator.Generator):
    """T-SQL generator.

    Configures T-SQL's SQL surface: TOP instead of LIMIT, OUTPUT instead of
    RETURNING, BIT booleans, SELECT ... INTO for CTAS, and the dialect's
    type and function name mappings.

    NOTE(review): this class continues beyond the visible chunk; the final
    method shown here (create_sql) is truncated by the extraction.
    """

    LIMIT_IS_TOP = True
    QUERY_HINTS = False
    RETURNING_END = False
    NVL2_SUPPORTED = False
    ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
    LIMIT_FETCH = "FETCH"
    COMPUTED_COLUMN_WITH_TYPE = False
    CTE_RECURSIVE_KEYWORD_REQUIRED = False
    ENSURE_BOOLS = True
    NULL_ORDERING_SUPPORTED = None
    SUPPORTS_SINGLE_ARG_CONCAT = False
    TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
    SUPPORTS_SELECT_INTO = True
    JSON_PATH_BRACKETED_KEY_SUPPORTED = False
    SUPPORTS_TO_NUMBER = False
    SET_OP_MODIFIERS = False
    COPY_PARAMS_EQ_REQUIRED = True
    PARSE_JSON_NAME = None
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
    ALTER_SET_WRAPPED = True
    ALTER_SET_TYPE = ""

    # Statements whose nested CTEs must be bubbled up to the top level.
    EXPRESSIONS_WITHOUT_NESTED_CTES = {
        exp.Create,
        exp.Delete,
        exp.Insert,
        exp.Intersect,
        exp.Except,
        exp.Merge,
        exp.Select,
        exp.Subquery,
        exp.Union,
        exp.Update,
    }

    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BOOLEAN: "BIT",
        exp.DataType.Type.DATETIME2: "DATETIME2",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DOUBLE: "FLOAT",
        exp.DataType.Type.INT: "INTEGER",
        exp.DataType.Type.ROWVERSION: "ROWVERSION",
        exp.DataType.Type.TEXT: "VARCHAR(MAX)",
        exp.DataType.Type.TIMESTAMP: "DATETIME2",
        exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
        exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
        exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
        exp.DataType.Type.UTINYINT: "TINYINT",
        exp.DataType.Type.VARIANT: "SQL_VARIANT",
        exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
    }

    # NCHAR/NVARCHAR are native T-SQL types; keep them as-is.
    TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
    TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.AnyValue: any_value_to_max_sql,
        exp.ArrayToString: rename_func("STRING_AGG"),
        exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
        exp.Chr: rename_func("CHAR"),
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.DateDiff: date_delta_sql("DATEDIFF"),
        exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
        exp.CurrentDate: rename_func("GETDATE"),
        exp.CurrentTimestamp: rename_func("GETDATE"),
        exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.Extract: rename_func("DATEPART"),
        exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
        exp.GroupConcat: _string_agg_sql,
        exp.If: rename_func("IIF"),
        exp.JSONExtract: _json_extract_sql,
        exp.JSONExtractScalar: _json_extract_sql,
        exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
        exp.Ln: rename_func("LOG"),
        exp.Max: max_or_greatest,
        exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
        exp.Min: min_or_least,
        exp.NumberToStr: _format_sql,
        exp.Repeat: rename_func("REPLICATE"),
        exp.CurrentSchema: rename_func("SCHEMA_NAME"),
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_distinct_on,
                transforms.eliminate_semi_and_anti_joins,
                transforms.eliminate_qualify,
                transforms.unnest_generate_date_array_using_recursive_cte,
            ]
        ),
        exp.Stddev: rename_func("STDEV"),
        exp.StrPosition: lambda self, e: strposition_sql(
            self, e, func_name="CHARINDEX", supports_position=True
        ),
        exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
        exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
        exp.SHA2: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
        ),
        exp.TemporaryProperty: lambda self, e: "",
        exp.TimeStrToTime: _timestrtotime_sql,
        exp.TimeToStr: _format_sql,
        exp.Trim: trim_sql,
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
        exp.Uuid: lambda *_: "NEWID()",
        exp.DateFromParts: rename_func("DATEFROMPARTS"),
    }

    # RETURNS is handled by returnsproperty_sql below, not the generic path.
    TRANSFORMS.pop(exp.ReturnsProperty)

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    def scope_resolution(self, rhs: str, scope_name: str) -> str:
        """Render scope resolution with T-SQL's '::' operator."""
        return f"{scope_name}::{rhs}"

    def select_sql(self, expression: exp.Select) -> str:
        """Generate SELECT, massaging LIMIT/OFFSET into T-SQL's
        ORDER BY ... OFFSET ... FETCH form when needed."""
        limit = expression.args.get("limit")
        offset = expression.args.get("offset")

        if isinstance(limit, exp.Fetch) and not offset:
            # Dialects like Oracle can FETCH directly from a row set but
            # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
            offset = exp.Offset(expression=exp.Literal.number(0))
            expression.set("offset", offset)

        if offset:
            if not expression.args.get("order"):
                # ORDER BY is required in order to use OFFSET in a query, so we use
                # a noop order by, since we don't really care about the order.
                # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                # we replace here because otherwise TOP would be generated in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

        return super().select_sql(expression)

    def convert_sql(self, expression: exp.Convert) -> str:
        """CONVERT/TRY_CONVERT with the optional style argument."""
        name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
        return self.func(
            name, expression.this, expression.expression, expression.args.get("style")
        )

    def queryoption_sql(self, expression: exp.QueryOption) -> str:
        """Render one OPTION(...) hint, with '=' only where T-SQL requires it."""
        option = self.sql(expression, "this")
        value = self.sql(expression, "expression")
        if value:
            optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
            return f"{option} {optional_equal_sign}{value}"
        return option

    def lateral_op(self, expression: exp.Lateral) -> str:
        """Map LATERAL joins to CROSS APPLY / OUTER APPLY."""
        cross_apply = expression.args.get("cross_apply")
        if cross_apply is True:
            return "CROSS APPLY"
        if cross_apply is False:
            return "OUTER APPLY"

        # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
        self.unsupported("LATERAL clause is not supported.")
        return "LATERAL"

    def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
        """Transpile SPLIT_PART to PARSENAME, which only supports a literal
        dot-separated string of at most four parts and indexes from the right."""
        this = expression.this
        split_count = len(this.name.split("."))
        delimiter = expression.args.get("delimiter")
        part_index = expression.args.get("part_index")

        if (
            not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
            or (delimiter and delimiter.name != ".")
            or not part_index
            or split_count > 4
        ):
            self.unsupported(
                "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
            )
            return ""

        return self.func(
            "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
        )

    def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
        """TIMEFROMPARTS requires all five arguments; fill in zeros and drop
        unsupported nanosecond precision."""
        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

        if expression.args.get("fractions") is None:
            expression.set("fractions", exp.Literal.number(0))
        if expression.args.get("precision") is None:
            expression.set("precision", exp.Literal.number(0))

        return rename_func("TIMEFROMPARTS")(self, expression)

    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
        """DATETIMEFROMPARTS requires milliseconds and supports neither a
        time zone nor nanoseconds; adjust the args accordingly."""
        zone = expression.args.get("zone")
        if zone is not None:
            zone.pop()
            self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

        if expression.args.get("milli") is None:
            expression.set("milli", exp.Literal.number(0))

        return rename_func("DATETIMEFROMPARTS")(self, expression)

    def setitem_sql(self, expression: exp.SetItem) -> str:
        this = expression.this
        if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
            # T-SQL does not use '=' in SET command, except when the LHS is a variable.
            return f"{self.sql(this.left)} {self.sql(this.right)}"

        return super().setitem_sql(expression)

    def boolean_sql(self, expression: exp.Boolean) -> str:
        """Render booleans as BIT values or (1 = 1)/(1 = 0) predicates."""
        if type(expression.parent) in BIT_TYPES or isinstance(
            expression.find_ancestor(exp.Values, exp.Select), exp.Values
        ):
            return "1" if expression.this else "0"

        return "(1 = 1)" if expression.this else "(1 = 0)"

    def is_sql(self, expression: exp.Is) -> str:
        """IS against a boolean uses '=' since T-SQL lacks IS TRUE/FALSE."""
        if isinstance(expression.expression, exp.Boolean):
            return self.binary(expression, "=")
        return self.binary(expression, "IS")

    def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
        """Prefix temp-table names with '#' per T-SQL convention."""
        sql = self.sql(expression, "this")
        properties = expression.args.get("properties")

        if sql[:1] != "#" and any(
            isinstance(prop, exp.TemporaryProperty)
            for prop in (properties.expressions if properties else [])
        ):
            sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

        return sql

    def create_sql(self, expression: exp.Create) -> str:
        kind = expression.kind
        exists = expression.args.pop("exists", None)

        like_property = expression.find(exp.LikeProperty)
        if like_property:
            ctas_expression = like_property.this
        else:
            ctas_expression = expression.expression

        if kind == "VIEW":
            expression.this.set("catalog", None)
            with_ = expression.args.get("with")
            if ctas_expression and with_:
                # We've already preprocessed the Create expression to bubble up any nested CTEs,
                # but CREATE VIEW actually requires the WITH clause to come after it so we need
                # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                ctas_expression.set("with", with_.pop())

        table = expression.find(exp.Table)

        # Convert CTAS statement to SELECT .. INTO ..
1233 if kind == "TABLE" and ctas_expression: 1234 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1235 ctas_expression = ctas_expression.subquery() 1236 1237 properties = expression.args.get("properties") or exp.Properties() 1238 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1239 1240 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1241 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1242 1243 if like_property: 1244 select_into.limit(0, copy=False) 1245 1246 sql = self.sql(select_into) 1247 else: 1248 sql = super().create_sql(expression) 1249 1250 if exists: 1251 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1252 sql_with_ctes = self.prepend_ctes(expression, sql) 1253 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1254 if kind == "SCHEMA": 1255 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1256 elif kind == "TABLE": 1257 assert table 1258 where = exp.and_( 1259 exp.column("TABLE_NAME").eq(table.name), 1260 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1261 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1262 ) 1263 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1264 elif kind == "INDEX": 1265 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1266 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1267 elif expression.args.get("replace"): 1268 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1269 1270 return self.prepend_ctes(expression, sql) 1271 1272 @generator.unsupported_args("unlogged", "expressions") 1273 def into_sql(self, expression: exp.Into) -> str: 1274 if expression.args.get("temporary"): 1275 # If the Into expression 
has a temporary property, push this down to the Identifier 1276 table = expression.find(exp.Table) 1277 if table and isinstance(table.this, exp.Identifier): 1278 table.this.set("temporary", True) 1279 1280 return f"{self.seg('INTO')} {self.sql(expression, 'this')}" 1281 1282 def count_sql(self, expression: exp.Count) -> str: 1283 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1284 return rename_func(func_name)(self, expression) 1285 1286 def offset_sql(self, expression: exp.Offset) -> str: 1287 return f"{super().offset_sql(expression)} ROWS" 1288 1289 def version_sql(self, expression: exp.Version) -> str: 1290 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1291 this = f"FOR {name}" 1292 expr = expression.expression 1293 kind = expression.text("kind") 1294 if kind in ("FROM", "BETWEEN"): 1295 args = expr.expressions 1296 sep = "TO" if kind == "FROM" else "AND" 1297 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1298 else: 1299 expr_sql = self.sql(expr) 1300 1301 expr_sql = f" {expr_sql}" if expr_sql else "" 1302 return f"{this} {kind}{expr_sql}" 1303 1304 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1305 table = expression.args.get("table") 1306 table = f"{table} " if table else "" 1307 return f"RETURNS {table}{self.sql(expression, 'this')}" 1308 1309 def returning_sql(self, expression: exp.Returning) -> str: 1310 into = self.sql(expression, "into") 1311 into = self.seg(f"INTO {into}") if into else "" 1312 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1313 1314 def transaction_sql(self, expression: exp.Transaction) -> str: 1315 this = self.sql(expression, "this") 1316 this = f" {this}" if this else "" 1317 mark = self.sql(expression, "mark") 1318 mark = f" WITH MARK {mark}" if mark else "" 1319 return f"BEGIN TRANSACTION{this}{mark}" 1320 1321 def commit_sql(self, expression: exp.Commit) -> str: 1322 this = 
self.sql(expression, "this") 1323 this = f" {this}" if this else "" 1324 durability = expression.args.get("durability") 1325 durability = ( 1326 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1327 if durability is not None 1328 else "" 1329 ) 1330 return f"COMMIT TRANSACTION{this}{durability}" 1331 1332 def rollback_sql(self, expression: exp.Rollback) -> str: 1333 this = self.sql(expression, "this") 1334 this = f" {this}" if this else "" 1335 return f"ROLLBACK TRANSACTION{this}" 1336 1337 def identifier_sql(self, expression: exp.Identifier) -> str: 1338 identifier = super().identifier_sql(expression) 1339 1340 if expression.args.get("global"): 1341 identifier = f"##{identifier}" 1342 elif expression.args.get("temporary"): 1343 identifier = f"#{identifier}" 1344 1345 return identifier 1346 1347 def constraint_sql(self, expression: exp.Constraint) -> str: 1348 this = self.sql(expression, "this") 1349 expressions = self.expressions(expression, flat=True, sep=" ") 1350 return f"CONSTRAINT {this} {expressions}" 1351 1352 def length_sql(self, expression: exp.Length) -> str: 1353 return self._uncast_text(expression, "LEN") 1354 1355 def right_sql(self, expression: exp.Right) -> str: 1356 return self._uncast_text(expression, "RIGHT") 1357 1358 def left_sql(self, expression: exp.Left) -> str: 1359 return self._uncast_text(expression, "LEFT") 1360 1361 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1362 this = expression.this 1363 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1364 this_sql = self.sql(this, "this") 1365 else: 1366 this_sql = self.sql(this) 1367 expression_sql = self.sql(expression, "expression") 1368 return self.func(name, this_sql, expression_sql if expression_sql else None) 1369 1370 def partition_sql(self, expression: exp.Partition) -> str: 1371 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1372 1373 def alter_sql(self, expression: exp.Alter) -> str: 1374 action = 
seq_get(expression.args.get("actions") or [], 0) 1375 if isinstance(action, exp.AlterRename): 1376 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1377 return super().alter_sql(expression) 1378 1379 def drop_sql(self, expression: exp.Drop) -> str: 1380 if expression.args["kind"] == "VIEW": 1381 expression.this.set("catalog", None) 1382 return super().drop_sql(expression) 1383 1384 def options_modifier(self, expression: exp.Expression) -> str: 1385 options = self.expressions(expression, key="options") 1386 return f" OPTION{self.wrap(options)}" if options else "" 1387 1388 def dpipe_sql(self, expression: exp.DPipe) -> str: 1389 return self.sql( 1390 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1391 ) 1392 1393 def isascii_sql(self, expression: exp.IsAscii) -> str: 1394 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1395 1396 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1397 this = super().columndef_sql(expression, sep) 1398 default = self.sql(expression, "default") 1399 default = f" = {default}" if default else "" 1400 output = self.sql(expression, "output") 1401 output = f" {output}" if output else "" 1402 return f"{this}{default}{output}" 1403 1404 def coalesce_sql(self, expression: exp.Coalesce) -> str: 1405 func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE" 1406 return rename_func(func_name)(self, expression)
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
- normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3.
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
1079 def select_sql(self, expression: exp.Select) -> str: 1080 limit = expression.args.get("limit") 1081 offset = expression.args.get("offset") 1082 1083 if isinstance(limit, exp.Fetch) and not offset: 1084 # Dialects like Oracle can FETCH directly from a row set but 1085 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1086 offset = exp.Offset(expression=exp.Literal.number(0)) 1087 expression.set("offset", offset) 1088 1089 if offset: 1090 if not expression.args.get("order"): 1091 # ORDER BY is required in order to use OFFSET in a query, so we use 1092 # a noop order by, since we don't really care about the order. 1093 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1094 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1095 1096 if isinstance(limit, exp.Limit): 1097 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1098 # we replace here because otherwise TOP would be generated in select_sql 1099 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1100 1101 return super().select_sql(expression)
1109 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1110 option = self.sql(expression, "this") 1111 value = self.sql(expression, "expression") 1112 if value: 1113 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1114 return f"{option} {optional_equal_sign}{value}" 1115 return option
1117 def lateral_op(self, expression: exp.Lateral) -> str: 1118 cross_apply = expression.args.get("cross_apply") 1119 if cross_apply is True: 1120 return "CROSS APPLY" 1121 if cross_apply is False: 1122 return "OUTER APPLY" 1123 1124 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1125 self.unsupported("LATERAL clause is not supported.") 1126 return "LATERAL"
1128 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1129 this = expression.this 1130 split_count = len(this.name.split(".")) 1131 delimiter = expression.args.get("delimiter") 1132 part_index = expression.args.get("part_index") 1133 1134 if ( 1135 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1136 or (delimiter and delimiter.name != ".") 1137 or not part_index 1138 or split_count > 4 1139 ): 1140 self.unsupported( 1141 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1142 ) 1143 return "" 1144 1145 return self.func( 1146 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1147 )
1149 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1150 nano = expression.args.get("nano") 1151 if nano is not None: 1152 nano.pop() 1153 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1154 1155 if expression.args.get("fractions") is None: 1156 expression.set("fractions", exp.Literal.number(0)) 1157 if expression.args.get("precision") is None: 1158 expression.set("precision", exp.Literal.number(0)) 1159 1160 return rename_func("TIMEFROMPARTS")(self, expression)
1162 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1163 zone = expression.args.get("zone") 1164 if zone is not None: 1165 zone.pop() 1166 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1167 1168 nano = expression.args.get("nano") 1169 if nano is not None: 1170 nano.pop() 1171 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1172 1173 if expression.args.get("milli") is None: 1174 expression.set("milli", exp.Literal.number(0)) 1175 1176 return rename_func("DATETIMEFROMPARTS")(self, expression)
1178 def setitem_sql(self, expression: exp.SetItem) -> str: 1179 this = expression.this 1180 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1181 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1182 return f"{self.sql(this.left)} {self.sql(this.right)}" 1183 1184 return super().setitem_sql(expression)
1199 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1200 sql = self.sql(expression, "this") 1201 properties = expression.args.get("properties") 1202 1203 if sql[:1] != "#" and any( 1204 isinstance(prop, exp.TemporaryProperty) 1205 for prop in (properties.expressions if properties else []) 1206 ): 1207 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1208 1209 return sql
def create_sql(self, expression: exp.Create) -> str:
    """Generate CREATE statements with T-SQL-specific rewrites.

    Handles three transformations visible below:
    - CTAS (CREATE TABLE ... AS SELECT) becomes SELECT * INTO ... FROM (...).
    - IF NOT EXISTS is emulated with an `IF NOT EXISTS (...) EXEC('...')` wrapper
      probing INFORMATION_SCHEMA / sys.indexes (T-SQL has no native IF NOT EXISTS).
    - CREATE OR REPLACE is rewritten to CREATE OR ALTER.
    """
    kind = expression.kind
    # Popped so the parent generator won't try to render EXISTS itself.
    exists = expression.args.pop("exists", None)

    like_property = expression.find(exp.LikeProperty)
    if like_property:
        ctas_expression = like_property.this
    else:
        ctas_expression = expression.expression

    if kind == "VIEW":
        expression.this.set("catalog", None)
        with_ = expression.args.get("with")
        if ctas_expression and with_:
            # We've already preprocessed the Create expression to bubble up any nested CTEs,
            # but CREATE VIEW actually requires the WITH clause to come after it so we need
            # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
            ctas_expression.set("with", with_.pop())

    table = expression.find(exp.Table)

    # Convert CTAS statement to SELECT .. INTO ..
    if kind == "TABLE" and ctas_expression:
        if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
            ctas_expression = ctas_expression.subquery()

        properties = expression.args.get("properties") or exp.Properties()
        is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

        select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
        select_into.set("into", exp.Into(this=table, temporary=is_temp))

        if like_property:
            # CREATE TABLE ... LIKE only copies the schema, so fetch zero rows.
            select_into.limit(0, copy=False)

        sql = self.sql(select_into)
    else:
        sql = super().create_sql(expression)

    if exists:
        identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
        sql_with_ctes = self.prepend_ctes(expression, sql)
        # The whole statement is embedded as a string literal inside EXEC(...).
        sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
        if kind == "SCHEMA":
            return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
        elif kind == "TABLE":
            assert table
            where = exp.and_(
                exp.column("TABLE_NAME").eq(table.name),
                exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
            )
            return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
        elif kind == "INDEX":
            index = self.sql(exp.Literal.string(expression.this.text("this")))
            return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
    elif expression.args.get("replace"):
        sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

    return self.prepend_ctes(expression, sql)
1272 @generator.unsupported_args("unlogged", "expressions") 1273 def into_sql(self, expression: exp.Into) -> str: 1274 if expression.args.get("temporary"): 1275 # If the Into expression has a temporary property, push this down to the Identifier 1276 table = expression.find(exp.Table) 1277 if table and isinstance(table.this, exp.Identifier): 1278 table.this.set("temporary", True) 1279 1280 return f"{self.seg('INTO')} {self.sql(expression, 'this')}"
1289 def version_sql(self, expression: exp.Version) -> str: 1290 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1291 this = f"FOR {name}" 1292 expr = expression.expression 1293 kind = expression.text("kind") 1294 if kind in ("FROM", "BETWEEN"): 1295 args = expr.expressions 1296 sep = "TO" if kind == "FROM" else "AND" 1297 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1298 else: 1299 expr_sql = self.sql(expr) 1300 1301 expr_sql = f" {expr_sql}" if expr_sql else "" 1302 return f"{this} {kind}{expr_sql}"
1321 def commit_sql(self, expression: exp.Commit) -> str: 1322 this = self.sql(expression, "this") 1323 this = f" {this}" if this else "" 1324 durability = expression.args.get("durability") 1325 durability = ( 1326 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1327 if durability is not None 1328 else "" 1329 ) 1330 return f"COMMIT TRANSACTION{this}{durability}"
1337 def identifier_sql(self, expression: exp.Identifier) -> str: 1338 identifier = super().identifier_sql(expression) 1339 1340 if expression.args.get("global"): 1341 identifier = f"##{identifier}" 1342 elif expression.args.get("temporary"): 1343 identifier = f"#{identifier}" 1344 1345 return identifier
1396 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1397 this = super().columndef_sql(expression, sep) 1398 default = self.sql(expression, "default") 1399 default = f" = {default}" if default else "" 1400 output = self.sql(expression, "output") 1401 output = f" {output}" if output else "" 1402 return f"{this}{default}{output}"
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- SUPPORTS_WINDOW_EXCLUDE
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- SUPPORTS_MEDIAN
- SUPPORTS_UNIX_SECONDS
- NORMALIZE_EXTRACT_DATE_PARTS
- ARRAY_SIZE_NAME
- ARRAY_SIZE_DIM_REQUIRED
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- sanitize_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- columnposition_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- limitoptions_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablefromrows_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- from_sql
- groupingsets_sql
- rollup_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- for_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterindex_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- addpartition_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- safedivide_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- jsoncast_sql
- try_sql
- log_sql
- use_sql
- binary
- ceil_floor
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- whens_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- uniquekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodatetime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- arrayconcat_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonextractquote_sql
- jsonexists_sql
- arrayagg_sql
- apply_sql
- grant_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql
- string_sql
- median_sql
- overflowtruncatebehavior_sql
- unixseconds_sql
- arraysize_sql
- attach_sql
- detach_sql
- attachoption_sql
- featuresattime_sql
- watermarkcolumnconstraint_sql
- encodeproperty_sql
- includeproperty_sql
- xmlelement_sql
- xmlkeyvalueoption_sql
- partitionbyrangeproperty_sql
- partitionbyrangepropertydynamic_sql
- unpivotcolumns_sql
- analyzesample_sql
- analyzestatistics_sql
- analyzehistogram_sql
- analyzedelete_sql
- analyzelistchainedrows_sql
- analyzevalidate_sql
- analyze_sql
- xmltable_sql
- xmlnamespace_sql
- export_sql
- declare_sql
- declareitem_sql
- recursivewithsearch_sql
- parameterizedagg_sql
- anonymousaggfunc_sql
- combinedaggfunc_sql
- combinedparameterizedagg_sql
- show_sql
- get_put_sql
- translatecharacters_sql