sqlglot.dialects.tsql
1from __future__ import annotations 2 3import datetime 4import re 5import typing as t 6from functools import partial, reduce 7 8from sqlglot import exp, generator, parser, tokens, transforms 9from sqlglot.dialects.dialect import ( 10 Dialect, 11 NormalizationStrategy, 12 any_value_to_max_sql, 13 date_delta_sql, 14 datestrtodate_sql, 15 generatedasidentitycolumnconstraint_sql, 16 max_or_greatest, 17 min_or_least, 18 build_date_delta, 19 rename_func, 20 strposition_sql, 21 trim_sql, 22 timestrtotime_sql, 23) 24from sqlglot.helper import seq_get 25from sqlglot.parser import build_coalesce 26from sqlglot.time import format_time 27from sqlglot.tokens import TokenType 28 29if t.TYPE_CHECKING: 30 from sqlglot._typing import E 31 32FULL_FORMAT_TIME_MAPPING = { 33 "weekday": "%A", 34 "dw": "%A", 35 "w": "%A", 36 "month": "%B", 37 "mm": "%B", 38 "m": "%B", 39} 40 41DATE_DELTA_INTERVAL = { 42 "year": "year", 43 "yyyy": "year", 44 "yy": "year", 45 "quarter": "quarter", 46 "qq": "quarter", 47 "q": "quarter", 48 "month": "month", 49 "mm": "month", 50 "m": "month", 51 "week": "week", 52 "ww": "week", 53 "wk": "week", 54 "day": "day", 55 "dd": "day", 56 "d": "day", 57} 58 59 60DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})") 61 62# N = Numeric, C=Currency 63TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"} 64 65DEFAULT_START_DATE = datetime.date(1900, 1, 1) 66 67BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias} 68 69# Unsupported options: 70# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] ) 71# - TABLE HINT 72OPTIONS: parser.OPTIONS_TYPE = { 73 **dict.fromkeys( 74 ( 75 "DISABLE_OPTIMIZED_PLAN_FORCING", 76 "FAST", 77 "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX", 78 "LABEL", 79 "MAXDOP", 80 "MAXRECURSION", 81 "MAX_GRANT_PERCENT", 82 "MIN_GRANT_PERCENT", 83 "NO_PERFORMANCE_SPOOL", 84 "QUERYTRACEON", 85 "RECOMPILE", 86 ), 87 tuple(), 88 ), 89 "CONCAT": ("UNION",), 90 "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"), 91 "EXPAND": ("VIEWS",), 92 "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"), 93 "HASH": ("GROUP", "JOIN", "UNION"), 94 "KEEP": ("PLAN",), 95 "KEEPFIXED": ("PLAN",), 96 "LOOP": ("JOIN",), 97 "MERGE": ("JOIN", "UNION"), 98 "OPTIMIZE": (("FOR", "UNKNOWN"),), 99 "ORDER": ("GROUP",), 100 "PARAMETERIZATION": ("FORCED", "SIMPLE"), 101 "ROBUST": ("PLAN",), 102 "USE": ("PLAN",), 103} 104 105OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL") 106 107 108def _build_formatted_time( 109 exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None 110) -> t.Callable[[t.List], E]: 111 def _builder(args: t.List) -> E: 112 assert len(args) == 2 113 114 return exp_class( 115 this=exp.cast(args[1], exp.DataType.Type.DATETIME2), 116 format=exp.Literal.string( 117 format_time( 118 args[0].name.lower(), 119 ( 120 {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING} 121 if full_format_mapping 122 else TSQL.TIME_MAPPING 123 ), 124 ) 125 ), 126 ) 127 128 return _builder 129 130 131def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr: 132 this = seq_get(args, 0) 133 fmt = seq_get(args, 1) 134 culture = seq_get(args, 2) 135 136 number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name)) 137 138 if number_fmt: 139 return exp.NumberToStr(this=this, format=fmt, culture=culture) 140 141 if fmt: 142 fmt = exp.Literal.string( 143 format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING) 144 if len(fmt.name) == 1 145 else format_time(fmt.name, TSQL.TIME_MAPPING) 146 ) 147 148 return exp.TimeToStr(this=this, format=fmt, culture=culture) 149 150 151def _build_eomonth(args: t.List) -> exp.LastDay: 152 date = exp.TsOrDsToDate(this=seq_get(args, 0)) 153 month_lag = seq_get(args, 1) 154 155 if month_lag is None: 156 this: exp.Expression = date 157 else: 158 unit = DATE_DELTA_INTERVAL.get("month") 159 this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit)) 160 161 return exp.LastDay(this=this) 162 163 164def _build_hashbytes(args: t.List) -> exp.Expression: 165 kind, data = args 166 kind = kind.name.upper() if kind.is_string else "" 167 168 if kind == "MD5": 169 args.pop(0) 170 return exp.MD5(this=data) 171 if kind in ("SHA", "SHA1"): 172 args.pop(0) 173 return exp.SHA(this=data) 174 if kind == "SHA2_256": 175 return exp.SHA2(this=data, length=exp.Literal.number(256)) 176 if kind == "SHA2_512": 177 return exp.SHA2(this=data, length=exp.Literal.number(512)) 178 179 return exp.func("HASHBYTES", *args) 180 181 182DATEPART_ONLY_FORMATS = {"DW", "WK", "HOUR", "QUARTER"} 183 184 185def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str: 186 fmt = expression.args["format"] 187 188 if not isinstance(expression, exp.NumberToStr): 189 if fmt.is_string: 190 mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING) 191 192 name = (mapped_fmt or "").upper() 193 if name in DATEPART_ONLY_FORMATS: 194 return self.func("DATEPART", name, expression.this) 195 196 fmt_sql = self.sql(exp.Literal.string(mapped_fmt)) 197 else: 198 fmt_sql = self.format_time(expression) or self.sql(fmt) 199 else: 200 fmt_sql = self.sql(fmt) 201 202 return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture")) 203 204 205def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str: 206 this = expression.this 207 distinct = expression.find(exp.Distinct) 208 if distinct: 209 # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression 210 self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.") 211 this = distinct.pop().expressions[0] 212 213 order = "" 214 if isinstance(expression.this, exp.Order): 215 if expression.this.this: 216 this = expression.this.this.pop() 217 # Order has a leading space 218 order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})" 219 220 separator = expression.args.get("separator") or exp.Literal.string(",") 221 return f"STRING_AGG({self.format_args(this, separator)}){order}" 222 223 224def _build_date_delta( 225 exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None 226) -> t.Callable[[t.List], E]: 227 def _builder(args: t.List) -> E: 228 unit = seq_get(args, 0) 229 if unit and unit_mapping: 230 unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name)) 231 232 start_date = seq_get(args, 1) 233 if start_date and start_date.is_number: 234 # Numeric types are valid DATETIME values 235 if start_date.is_int: 236 adds = DEFAULT_START_DATE + datetime.timedelta(days=start_date.to_py()) 237 start_date = exp.Literal.string(adds.strftime("%F")) 238 else: 239 # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs. 240 # This is not a problem when generating T-SQL code, it is when transpiling to other dialects. 241 return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit) 242 243 return exp_class( 244 this=exp.TimeStrToTime(this=seq_get(args, 2)), 245 expression=exp.TimeStrToTime(this=start_date), 246 unit=unit, 247 ) 248 249 return _builder 250 251 252def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression: 253 """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.""" 254 alias = expression.args.get("alias") 255 256 if ( 257 isinstance(expression, (exp.CTE, exp.Subquery)) 258 and isinstance(alias, exp.TableAlias) 259 and not alias.columns 260 ): 261 from sqlglot.optimizer.qualify_columns import qualify_outputs 262 263 # We keep track of the unaliased column projection indexes instead of the expressions 264 # themselves, because the latter are going to be replaced by new nodes when the aliases 265 # are added and hence we won't be able to reach these newly added Alias parents 266 query = expression.this 267 unaliased_column_indexes = ( 268 i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias 269 ) 270 271 qualify_outputs(query) 272 273 # Preserve the quoting information of columns for newly added Alias nodes 274 query_selects = query.selects 275 for select_index in unaliased_column_indexes: 276 alias = query_selects[select_index] 277 column = alias.this 278 if isinstance(column.this, exp.Identifier): 279 alias.args["alias"].set("quoted", column.this.quoted) 280 281 return expression 282 283 284# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax 285def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts: 286 return exp.TimestampFromParts( 287 year=seq_get(args, 0), 288 month=seq_get(args, 1), 289 day=seq_get(args, 2), 290 hour=seq_get(args, 3), 291 min=seq_get(args, 4), 292 sec=seq_get(args, 5), 293 milli=seq_get(args, 6), 294 ) 295 296 297# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax 298def _build_timefromparts(args: t.List) -> exp.TimeFromParts: 299 return exp.TimeFromParts( 300 hour=seq_get(args, 0), 301 min=seq_get(args, 1), 302 sec=seq_get(args, 2), 303 fractions=seq_get(args, 3), 304 precision=seq_get(args, 4), 305 ) 306 307 308def _build_with_arg_as_text( 309 klass: t.Type[exp.Expression], 310) -> t.Callable[[t.List[exp.Expression]], exp.Expression]: 311 def _parse(args: t.List[exp.Expression]) -> exp.Expression: 312 this = seq_get(args, 0) 313 314 if this and not this.is_string: 315 this = exp.cast(this, exp.DataType.Type.TEXT) 316 317 expression = seq_get(args, 1) 318 kwargs = {"this": this} 319 320 if expression: 321 kwargs["expression"] = expression 322 323 return klass(**kwargs) 324 325 return _parse 326 327 328# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16 329def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous: 330 # PARSENAME(...) will be stored into exp.SplitPart if: 331 # - All args are literals 332 # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL) 333 if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args): 334 this = args[0] 335 part_index = args[1] 336 split_count = len(this.name.split(".")) 337 if split_count <= 4: 338 return exp.SplitPart( 339 this=this, 340 delimiter=exp.Literal.string("."), 341 part_index=exp.Literal.number(split_count + 1 - part_index.to_py()), 342 ) 343 344 return exp.Anonymous(this="PARSENAME", expressions=args) 345 346 347def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract: 348 if len(args) == 1: 349 # The default value for path is '$'. As a result, if you don't provide a 350 # value for path, JSON_QUERY returns the input expression. 351 args.append(exp.Literal.string("$")) 352 353 return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect) 354 355 356def _json_extract_sql( 357 self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar 358) -> str: 359 json_query = self.func("JSON_QUERY", expression.this, expression.expression) 360 json_value = self.func("JSON_VALUE", expression.this, expression.expression) 361 return self.func("ISNULL", json_query, json_value) 362 363 364def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime): 365 sql = timestrtotime_sql(self, expression) 366 if expression.args.get("zone"): 367 # If there is a timezone, produce an expression like: 368 # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC' 369 # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information 370 return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC"))) 371 return sql 372 373 374def _build_datetrunc(args: t.List) -> exp.TimestampTrunc: 375 unit = seq_get(args, 0) 376 this = seq_get(args, 1) 377 378 if this and this.is_string: 379 this = exp.cast(this, exp.DataType.Type.DATETIME2) 380 381 return exp.TimestampTrunc(this=this, unit=unit) 382 383 384class TSQL(Dialect): 385 SUPPORTS_SEMI_ANTI_JOIN = False 386 LOG_BASE_FIRST = False 387 TYPED_DIVISION = True 388 CONCAT_COALESCE = True 389 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 390 391 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 392 393 TIME_MAPPING = { 394 "year": "%Y", 395 "dayofyear": "%j", 396 "day": "%d", 397 "dy": "%d", 398 "y": "%Y", 399 "week": "%W", 400 "ww": "%W", 401 "wk": "%W", 402 "hour": "%h", 403 "hh": "%I", 404 "minute": "%M", 405 "mi": "%M", 406 "n": "%M", 407 "second": "%S", 408 "ss": "%S", 409 "s": "%-S", 410 "millisecond": "%f", 411 "ms": "%f", 412 "weekday": "%w", 413 "dw": "%w", 414 "month": "%m", 415 "mm": "%M", 416 "m": "%-M", 417 "Y": "%Y", 418 "YYYY": "%Y", 419 "YY": "%y", 420 "MMMM": "%B", 421 "MMM": "%b", 422 "MM": "%m", 423 "M": "%-m", 424 "dddd": "%A", 425 "dd": "%d", 426 "d": "%-d", 427 "HH": "%H", 428 "H": "%-H", 429 "h": "%-I", 430 "ffffff": "%f", 431 "yyyy": "%Y", 432 "yy": "%y", 433 } 434 435 CONVERT_FORMAT_MAPPING = { 436 "0": "%b %d %Y %-I:%M%p", 437 "1": "%m/%d/%y", 438 "2": "%y.%m.%d", 439 "3": "%d/%m/%y", 440 "4": "%d.%m.%y", 441 "5": "%d-%m-%y", 442 "6": "%d %b %y", 443 "7": "%b %d, %y", 444 "8": "%H:%M:%S", 445 "9": "%b %d %Y %-I:%M:%S:%f%p", 446 "10": "mm-dd-yy", 447 "11": "yy/mm/dd", 448 "12": "yymmdd", 449 "13": "%d %b %Y %H:%M:ss:%f", 450 "14": "%H:%M:%S:%f", 451 "20": "%Y-%m-%d %H:%M:%S", 452 "21": "%Y-%m-%d %H:%M:%S.%f", 453 "22": "%m/%d/%y %-I:%M:%S %p", 454 "23": "%Y-%m-%d", 455 "24": "%H:%M:%S", 456 "25": "%Y-%m-%d %H:%M:%S.%f", 457 "100": "%b %d %Y %-I:%M%p", 458 "101": "%m/%d/%Y", 459 "102": "%Y.%m.%d", 460 "103": "%d/%m/%Y", 461 "104": "%d.%m.%Y", 462 "105": "%d-%m-%Y", 463 "106": "%d %b %Y", 464 "107": "%b %d, %Y", 465 "108": "%H:%M:%S", 466 "109": "%b %d %Y %-I:%M:%S:%f%p", 467 "110": "%m-%d-%Y", 468 "111": "%Y/%m/%d", 469 "112": "%Y%m%d", 470 "113": "%d %b %Y %H:%M:%S:%f", 471 "114": "%H:%M:%S:%f", 472 "120": "%Y-%m-%d %H:%M:%S", 473 "121": "%Y-%m-%d %H:%M:%S.%f", 474 } 475 476 FORMAT_TIME_MAPPING = { 477 "y": "%B %Y", 478 "d": "%m/%d/%Y", 479 "H": "%-H", 480 "h": "%-I", 481 "s": "%Y-%m-%d %H:%M:%S", 482 "D": "%A,%B,%Y", 483 "f": "%A,%B,%Y %-I:%M %p", 484 "F": "%A,%B,%Y %-I:%M:%S %p", 485 "g": "%m/%d/%Y %-I:%M %p", 486 "G": "%m/%d/%Y %-I:%M:%S %p", 487 "M": "%B %-d", 488 "m": "%B %-d", 489 "O": "%Y-%m-%dT%H:%M:%S", 490 "u": "%Y-%M-%D %H:%M:%S%z", 491 "U": "%A, %B %D, %Y %H:%M:%S%z", 492 "T": "%-I:%M:%S %p", 493 "t": "%-I:%M", 494 "Y": "%a %Y", 495 } 496 497 class Tokenizer(tokens.Tokenizer): 498 IDENTIFIERS = [("[", "]"), '"'] 499 QUOTES = ["'", '"'] 500 HEX_STRINGS = [("0x", ""), ("0X", "")] 501 VAR_SINGLE_TOKENS = {"@", "$", "#"} 502 503 KEYWORDS = { 504 **tokens.Tokenizer.KEYWORDS, 505 "CLUSTERED INDEX": TokenType.INDEX, 506 "DATETIME2": TokenType.DATETIME2, 507 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 508 "DECLARE": TokenType.DECLARE, 509 "EXEC": TokenType.COMMAND, 510 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 511 "IMAGE": TokenType.IMAGE, 512 "MONEY": TokenType.MONEY, 513 "NONCLUSTERED INDEX": TokenType.INDEX, 514 "NTEXT": TokenType.TEXT, 515 "OPTION": TokenType.OPTION, 516 "OUTPUT": TokenType.RETURNING, 517 "PRINT": TokenType.COMMAND, 518 "PROC": TokenType.PROCEDURE, 519 "REAL": TokenType.FLOAT, 520 "ROWVERSION": TokenType.ROWVERSION, 521 "SMALLDATETIME": TokenType.SMALLDATETIME, 522 "SMALLMONEY": TokenType.SMALLMONEY, 523 "SQL_VARIANT": TokenType.VARIANT, 524 "SYSTEM_USER": TokenType.CURRENT_USER, 525 "TOP": TokenType.TOP, 526 "TIMESTAMP": TokenType.ROWVERSION, 527 "TINYINT": TokenType.UTINYINT, 528 "UNIQUEIDENTIFIER": TokenType.UUID, 529 "UPDATE STATISTICS": TokenType.COMMAND, 530 "XML": TokenType.XML, 531 } 532 KEYWORDS.pop("/*+") 533 534 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 535 536 class Parser(parser.Parser): 537 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 538 LOG_DEFAULTS_TO_LN = True 539 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 540 STRING_ALIASES = True 541 NO_PAREN_IF_COMMANDS = False 542 543 QUERY_MODIFIER_PARSERS = { 544 **parser.Parser.QUERY_MODIFIER_PARSERS, 545 TokenType.OPTION: lambda self: ("options", self._parse_options()), 546 } 547 548 # T-SQL does not allow BEGIN to be used as an identifier 549 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 550 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 551 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 552 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 553 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 554 555 FUNCTIONS = { 556 **parser.Parser.FUNCTIONS, 557 "CHARINDEX": lambda args: exp.StrPosition( 558 this=seq_get(args, 1), 559 substr=seq_get(args, 0), 560 position=seq_get(args, 2), 561 ), 562 "COUNT": lambda args: exp.Count( 563 this=seq_get(args, 0), expressions=args[1:], big_int=False 564 ), 565 "COUNT_BIG": lambda args: exp.Count( 566 this=seq_get(args, 0), expressions=args[1:], big_int=True 567 ), 568 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 569 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 570 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 571 "DATEPART": _build_formatted_time(exp.TimeToStr), 572 "DATETIMEFROMPARTS": _build_datetimefromparts, 573 "EOMONTH": _build_eomonth, 574 "FORMAT": _build_format, 575 "GETDATE": exp.CurrentTimestamp.from_arg_list, 576 "HASHBYTES": _build_hashbytes, 577 "ISNULL": build_coalesce, 578 "JSON_QUERY": _build_json_query, 579 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 580 "LEN": _build_with_arg_as_text(exp.Length), 581 "LEFT": _build_with_arg_as_text(exp.Left), 582 "NEWID": exp.Uuid.from_arg_list, 583 "RIGHT": _build_with_arg_as_text(exp.Right), 584 "PARSENAME": _build_parsename, 585 "REPLICATE": exp.Repeat.from_arg_list, 586 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 587 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 588 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 589 "SUSER_NAME": exp.CurrentUser.from_arg_list, 590 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 591 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 592 "TIMEFROMPARTS": _build_timefromparts, 593 "DATETRUNC": _build_datetrunc, 594 } 595 596 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 597 598 PROCEDURE_OPTIONS = dict.fromkeys( 599 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 600 ) 601 602 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READ_ONLY"} 603 604 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 605 TokenType.TABLE, 606 *parser.Parser.TYPE_TOKENS, 607 } 608 609 STATEMENT_PARSERS = { 610 **parser.Parser.STATEMENT_PARSERS, 611 TokenType.DECLARE: lambda self: self._parse_declare(), 612 } 613 614 RANGE_PARSERS = { 615 **parser.Parser.RANGE_PARSERS, 616 TokenType.DCOLON: lambda self, this: self.expression( 617 exp.ScopeResolution, 618 this=this, 619 expression=self._parse_function() or self._parse_var(any_token=True), 620 ), 621 } 622 623 NO_PAREN_FUNCTION_PARSERS = { 624 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 625 "NEXT": lambda self: self._parse_next_value_for(), 626 } 627 628 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 629 COLUMN_OPERATORS = { 630 **parser.Parser.COLUMN_OPERATORS, 631 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 632 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 633 else self.expression(exp.ScopeResolution, this=this, expression=to), 634 } 635 636 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 637 # We want to use _parse_types() if the first token after :: is a known type, 638 # otherwise we could parse something like x::varchar(max) into a function 639 if self._match_set(self.TYPE_TOKENS, advance=False): 640 return self._parse_types() 641 642 return self._parse_function() or self._parse_types() 643 644 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 645 if not self._match(TokenType.OPTION): 646 return None 647 648 def _parse_option() -> t.Optional[exp.Expression]: 649 option = self._parse_var_from_options(OPTIONS) 650 if not option: 651 return None 652 653 self._match(TokenType.EQ) 654 return self.expression( 655 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 656 ) 657 658 return self._parse_wrapped_csv(_parse_option) 659 660 def _parse_projections(self) -> t.List[exp.Expression]: 661 """ 662 T-SQL supports the syntax alias = expression in the SELECT's projection list, 663 so we transform all parsed Selects to convert their EQ projections into Aliases. 664 665 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 666 """ 667 return [ 668 ( 669 exp.alias_(projection.expression, projection.this.this, copy=False) 670 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 671 else projection 672 ) 673 for projection in super()._parse_projections() 674 ] 675 676 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 677 """Applies to SQL Server and Azure SQL Database 678 COMMIT [ { TRAN | TRANSACTION } 679 [ transaction_name | @tran_name_variable ] ] 680 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 681 682 ROLLBACK { TRAN | TRANSACTION } 683 [ transaction_name | @tran_name_variable 684 | savepoint_name | @savepoint_variable ] 685 """ 686 rollback = self._prev.token_type == TokenType.ROLLBACK 687 688 self._match_texts(("TRAN", "TRANSACTION")) 689 this = self._parse_id_var() 690 691 if rollback: 692 return self.expression(exp.Rollback, this=this) 693 694 durability = None 695 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 696 self._match_text_seq("DELAYED_DURABILITY") 697 self._match(TokenType.EQ) 698 699 if self._match_text_seq("OFF"): 700 durability = False 701 else: 702 self._match(TokenType.ON) 703 durability = True 704 705 self._match_r_paren() 706 707 return self.expression(exp.Commit, this=this, durability=durability) 708 709 def _parse_transaction(self) -> exp.Transaction | exp.Command: 710 """Applies to SQL Server and Azure SQL Database 711 BEGIN { TRAN | TRANSACTION } 712 [ { transaction_name | @tran_name_variable } 713 [ WITH MARK [ 'description' ] ] 714 ] 715 """ 716 if self._match_texts(("TRAN", "TRANSACTION")): 717 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 718 if self._match_text_seq("WITH", "MARK"): 719 transaction.set("mark", self._parse_string()) 720 721 return transaction 722 723 return self._parse_as_command(self._prev) 724 725 def _parse_returns(self) -> exp.ReturnsProperty: 726 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 727 returns = super()._parse_returns() 728 returns.set("table", table) 729 return returns 730 731 def _parse_convert( 732 self, strict: bool, safe: t.Optional[bool] = None 733 ) -> t.Optional[exp.Expression]: 734 this = self._parse_types() 735 self._match(TokenType.COMMA) 736 args = [this, *self._parse_csv(self._parse_assignment)] 737 convert = exp.Convert.from_arg_list(args) 738 convert.set("safe", safe) 739 convert.set("strict", strict) 740 return convert 741 742 def _parse_column_def( 743 self, this: t.Optional[exp.Expression], computed_column: bool = True 744 ) -> t.Optional[exp.Expression]: 745 this = super()._parse_column_def(this=this, computed_column=computed_column) 746 if not this: 747 return None 748 if self._match(TokenType.EQ): 749 this.set("default", self._parse_disjunction()) 750 if self._match_texts(self.COLUMN_DEFINITION_MODES): 751 this.set("output", self._prev.text) 752 return this 753 754 def _parse_user_defined_function( 755 self, kind: t.Optional[TokenType] = None 756 ) -> t.Optional[exp.Expression]: 757 this = super()._parse_user_defined_function(kind=kind) 758 759 if ( 760 kind == TokenType.FUNCTION 761 or isinstance(this, exp.UserDefinedFunction) 762 or self._match(TokenType.ALIAS, advance=False) 763 ): 764 return this 765 766 if not self._match(TokenType.WITH, advance=False): 767 expressions = self._parse_csv(self._parse_function_parameter) 768 else: 769 expressions = None 770 771 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 772 773 def _parse_id_var( 774 self, 775 any_token: bool = True, 776 tokens: t.Optional[t.Collection[TokenType]] = None, 777 ) -> t.Optional[exp.Expression]: 778 is_temporary = self._match(TokenType.HASH) 779 is_global = is_temporary and self._match(TokenType.HASH) 780 781 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 782 if this: 783 if is_global: 784 this.set("global", True) 785 elif is_temporary: 786 this.set("temporary", True) 787 788 return this 789 790 def _parse_create(self) -> exp.Create | exp.Command: 791 create = super()._parse_create() 792 793 if isinstance(create, exp.Create): 794 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 795 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 796 if not create.args.get("properties"): 797 create.set("properties", exp.Properties(expressions=[])) 798 799 create.args["properties"].append("expressions", exp.TemporaryProperty()) 800 801 return create 802 803 def _parse_if(self) -> t.Optional[exp.Expression]: 804 index = self._index 805 806 if self._match_text_seq("OBJECT_ID"): 807 self._parse_wrapped_csv(self._parse_string) 808 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 809 return self._parse_drop(exists=True) 810 self._retreat(index) 811 812 return super()._parse_if() 813 814 def _parse_unique(self) -> exp.UniqueColumnConstraint: 815 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 816 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 817 else: 818 this = self._parse_schema(self._parse_id_var(any_token=False)) 819 820 return self.expression(exp.UniqueColumnConstraint, this=this) 821 822 def _parse_partition(self) -> t.Optional[exp.Partition]: 823 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 824 return None 825 826 def parse_range(): 827 low = self._parse_bitwise() 828 high = self._parse_bitwise() if self._match_text_seq("TO") else None 829 830 return ( 831 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 832 ) 833 834 partition = self.expression( 835 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 836 ) 837 838 self._match_r_paren() 839 840 return partition 841 842 def _parse_declare(self) -> exp.Declare | exp.Command: 843 index = self._index 844 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 845 846 if not expressions or self._curr: 847 self._retreat(index) 848 return self._parse_as_command(self._prev) 849 850 return self.expression(exp.Declare, expressions=expressions) 851 852 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 853 var = self._parse_id_var() 854 if not var: 855 return None 856 857 value = None 858 self._match(TokenType.ALIAS) 859 if self._match(TokenType.TABLE): 860 data_type = self._parse_schema() 861 else: 862 data_type = self._parse_types() 863 if self._match(TokenType.EQ): 864 value = self._parse_bitwise() 865 866 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 867 868 class Generator(generator.Generator): 869 LIMIT_IS_TOP = True 870 QUERY_HINTS = False 871 RETURNING_END = False 872 NVL2_SUPPORTED = False 873 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 874 LIMIT_FETCH = "FETCH" 875 COMPUTED_COLUMN_WITH_TYPE = False 876 CTE_RECURSIVE_KEYWORD_REQUIRED = False 877 ENSURE_BOOLS = True 878 NULL_ORDERING_SUPPORTED = None 879 SUPPORTS_SINGLE_ARG_CONCAT = False 880 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 881 SUPPORTS_SELECT_INTO = True 882 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 883 SUPPORTS_TO_NUMBER = False 884 SET_OP_MODIFIERS = False 885 COPY_PARAMS_EQ_REQUIRED = True 886 PARSE_JSON_NAME = None 887 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 888 889 EXPRESSIONS_WITHOUT_NESTED_CTES = { 890 exp.Create, 891 exp.Delete, 892 exp.Insert, 893 exp.Intersect, 894 exp.Except, 895 exp.Merge, 896 exp.Select, 897 exp.Subquery, 898 exp.Union, 899 exp.Update, 900 } 901 902 SUPPORTED_JSON_PATH_PARTS = { 903 exp.JSONPathKey, 904 exp.JSONPathRoot, 905 exp.JSONPathSubscript, 906 } 907 908 TYPE_MAPPING = { 909 **generator.Generator.TYPE_MAPPING, 910 exp.DataType.Type.BOOLEAN: "BIT", 911 exp.DataType.Type.DATETIME2: "DATETIME2", 912 exp.DataType.Type.DECIMAL: "NUMERIC", 913 exp.DataType.Type.DOUBLE: "FLOAT", 914 exp.DataType.Type.INT: "INTEGER", 915 exp.DataType.Type.ROWVERSION: "ROWVERSION", 916 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 917 exp.DataType.Type.TIMESTAMP: "DATETIME2", 918 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 919 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 920 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 921 exp.DataType.Type.UTINYINT: "TINYINT", 922 exp.DataType.Type.VARIANT: "SQL_VARIANT", 923 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 924 } 925 926 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 927 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 928 929 TRANSFORMS = { 930 **generator.Generator.TRANSFORMS, 931 exp.AnyValue: any_value_to_max_sql, 932 exp.ArrayToString: rename_func("STRING_AGG"), 933 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 934 exp.Chr: rename_func("CHAR"), 935 exp.DateAdd: date_delta_sql("DATEADD"), 936 exp.DateDiff: date_delta_sql("DATEDIFF"), 937 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 938 exp.CurrentDate: rename_func("GETDATE"), 939 exp.CurrentTimestamp: rename_func("GETDATE"), 940 exp.DateStrToDate: datestrtodate_sql, 941 exp.Extract: rename_func("DATEPART"), 942 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 943 exp.GroupConcat: _string_agg_sql, 944 exp.If: rename_func("IIF"), 945 exp.JSONExtract: _json_extract_sql, 946 exp.JSONExtractScalar: _json_extract_sql, 947 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 948 exp.Ln: rename_func("LOG"), 949 exp.Max: max_or_greatest, 950 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 951 exp.Min: min_or_least, 952 exp.NumberToStr: _format_sql, 953 exp.Repeat: rename_func("REPLICATE"), 954 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 955 exp.Select: transforms.preprocess( 956 [ 957 transforms.eliminate_distinct_on, 958 transforms.eliminate_semi_and_anti_joins, 959 transforms.eliminate_qualify, 960 transforms.unnest_generate_date_array_using_recursive_cte, 961 ] 962 ), 963 exp.Stddev: rename_func("STDEV"), 964 exp.StrPosition: lambda self, e: strposition_sql( 965 self, e, func_name="CHARINDEX", supports_position=True 966 ), 967 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 968 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 969 exp.SHA2: lambda self, e: self.func( 970 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 971 ), 972 exp.TemporaryProperty: lambda self, e: "", 973 exp.TimeStrToTime: _timestrtotime_sql, 974 exp.TimeToStr: _format_sql, 975 exp.Trim: trim_sql, 976 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 977 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 978 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 979 exp.Uuid: lambda *_: "NEWID()", 980 exp.DateFromParts: rename_func("DATEFROMPARTS"), 981 } 982 983 TRANSFORMS.pop(exp.ReturnsProperty) 984 985 PROPERTIES_LOCATION = { 986 **generator.Generator.PROPERTIES_LOCATION, 987 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 988 } 989 990 def scope_resolution(self, rhs: str, scope_name: str) -> str: 991 return f"{scope_name}::{rhs}" 992 993 def select_sql(self, expression: exp.Select) -> str: 994 limit = expression.args.get("limit") 995 offset = expression.args.get("offset") 996 997 if isinstance(limit, exp.Fetch) and not offset: 998 # Dialects like Oracle can FETCH directly from a row set but 999 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1000 offset = exp.Offset(expression=exp.Literal.number(0)) 1001 expression.set("offset", offset) 1002 1003 if offset: 1004 if not expression.args.get("order"): 1005 # ORDER BY is required in order to use OFFSET in a query, so we use 1006 # a noop order by, since we don't really care about the order. 1007 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1008 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1009 1010 if isinstance(limit, exp.Limit): 1011 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1012 # we replace here because otherwise TOP would be generated in select_sql 1013 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1014 1015 return super().select_sql(expression) 1016 1017 def convert_sql(self, expression: exp.Convert) -> str: 1018 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1019 return self.func( 1020 name, expression.this, expression.expression, expression.args.get("style") 1021 ) 1022 1023 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1024 option = self.sql(expression, "this") 1025 value = self.sql(expression, "expression") 1026 if value: 1027 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1028 return f"{option} {optional_equal_sign}{value}" 1029 return option 1030 1031 def lateral_op(self, expression: exp.Lateral) -> str: 1032 cross_apply = expression.args.get("cross_apply") 1033 if cross_apply is True: 1034 return "CROSS APPLY" 1035 if cross_apply is False: 1036 return "OUTER APPLY" 1037 1038 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1039 self.unsupported("LATERAL clause is not supported.") 1040 return "LATERAL" 1041 1042 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1043 this = expression.this 1044 split_count = len(this.name.split(".")) 1045 delimiter = expression.args.get("delimiter") 1046 part_index = expression.args.get("part_index") 1047 1048 if ( 1049 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1050 or (delimiter and delimiter.name != ".") 1051 or not part_index 1052 or split_count > 4 1053 ): 1054 self.unsupported( 1055 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1056 ) 1057 return "" 1058 1059 return self.func( 1060 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1061 ) 1062 1063 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1064 nano = expression.args.get("nano") 1065 if nano is not None: 1066 nano.pop() 1067 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1068 1069 if expression.args.get("fractions") is None: 1070 expression.set("fractions", exp.Literal.number(0)) 1071 if expression.args.get("precision") is None: 1072 expression.set("precision", exp.Literal.number(0)) 1073 1074 return rename_func("TIMEFROMPARTS")(self, expression) 1075 1076 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1077 zone = expression.args.get("zone") 1078 if zone is not None: 1079 zone.pop() 1080 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1081 1082 nano = expression.args.get("nano") 1083 if nano is not None: 1084 nano.pop() 1085 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1086 1087 if expression.args.get("milli") is None: 1088 expression.set("milli", exp.Literal.number(0)) 1089 1090 return rename_func("DATETIMEFROMPARTS")(self, expression) 1091 1092 def setitem_sql(self, expression: exp.SetItem) -> str: 1093 this = expression.this 1094 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1095 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1096 return f"{self.sql(this.left)} {self.sql(this.right)}" 1097 1098 return super().setitem_sql(expression) 1099 1100 def boolean_sql(self, expression: exp.Boolean) -> str: 1101 if type(expression.parent) in BIT_TYPES or isinstance( 1102 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1103 ): 1104 return "1" if expression.this else "0" 1105 1106 return "(1 = 1)" if expression.this else "(1 = 0)" 1107 1108 def is_sql(self, expression: exp.Is) -> str: 1109 if isinstance(expression.expression, exp.Boolean): 1110 return self.binary(expression, "=") 1111 return self.binary(expression, "IS") 1112 1113 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1114 sql = self.sql(expression, "this") 1115 properties = expression.args.get("properties") 1116 1117 if sql[:1] != "#" and any( 1118 isinstance(prop, exp.TemporaryProperty) 1119 for prop in (properties.expressions if properties else []) 1120 ): 1121 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1122 1123 return sql 1124 1125 def create_sql(self, expression: exp.Create) -> str: 1126 kind = expression.kind 1127 exists = expression.args.pop("exists", None) 1128 1129 like_property = expression.find(exp.LikeProperty) 1130 if like_property: 1131 ctas_expression = like_property.this 1132 else: 1133 ctas_expression = expression.expression 1134 1135 if kind == "VIEW": 1136 expression.this.set("catalog", None) 1137 with_ = expression.args.get("with") 1138 if ctas_expression and with_: 1139 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1140 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1141 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1142 ctas_expression.set("with", with_.pop()) 1143 1144 sql = super().create_sql(expression) 1145 1146 table = expression.find(exp.Table) 1147 1148 # Convert CTAS statement to SELECT .. INTO .. 1149 if kind == "TABLE" and ctas_expression: 1150 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1151 ctas_expression = ctas_expression.subquery() 1152 1153 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1154 select_into.set("into", exp.Into(this=table)) 1155 1156 if like_property: 1157 select_into.limit(0, copy=False) 1158 1159 sql = self.sql(select_into) 1160 1161 if exists: 1162 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1163 sql_with_ctes = self.prepend_ctes(expression, sql) 1164 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1165 if kind == "SCHEMA": 1166 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1167 elif kind == "TABLE": 1168 assert table 1169 where = exp.and_( 1170 exp.column("table_name").eq(table.name), 1171 exp.column("table_schema").eq(table.db) if table.db else None, 1172 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1173 ) 1174 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1175 elif kind == "INDEX": 1176 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1177 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1178 elif expression.args.get("replace"): 1179 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1180 1181 return self.prepend_ctes(expression, sql) 1182 1183 def count_sql(self, expression: exp.Count) -> str: 1184 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1185 return rename_func(func_name)(self, expression) 1186 1187 def offset_sql(self, expression: exp.Offset) -> str: 1188 return f"{super().offset_sql(expression)} ROWS" 1189 1190 def version_sql(self, expression: exp.Version) -> str: 1191 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1192 this = f"FOR {name}" 1193 expr = expression.expression 1194 kind = expression.text("kind") 1195 if kind in ("FROM", "BETWEEN"): 1196 args = expr.expressions 1197 sep = "TO" if kind == "FROM" else "AND" 1198 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1199 else: 1200 expr_sql = self.sql(expr) 1201 1202 expr_sql = f" {expr_sql}" if expr_sql else "" 1203 return f"{this} {kind}{expr_sql}" 1204 1205 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1206 table = expression.args.get("table") 1207 table = f"{table} " if table else "" 1208 return f"RETURNS {table}{self.sql(expression, 'this')}" 1209 1210 def returning_sql(self, expression: exp.Returning) -> str: 1211 into = self.sql(expression, "into") 1212 into = self.seg(f"INTO {into}") if into else "" 1213 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1214 1215 def transaction_sql(self, expression: exp.Transaction) -> str: 1216 this = self.sql(expression, "this") 1217 this = f" {this}" if this else "" 1218 mark = self.sql(expression, "mark") 1219 mark = f" WITH MARK {mark}" if mark else "" 1220 return f"BEGIN TRANSACTION{this}{mark}" 1221 1222 def commit_sql(self, expression: exp.Commit) -> str: 1223 this = self.sql(expression, "this") 1224 this = f" {this}" if this else "" 1225 durability = expression.args.get("durability") 1226 durability = ( 1227 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1228 if durability is not None 1229 else "" 1230 ) 1231 return f"COMMIT TRANSACTION{this}{durability}" 1232 1233 def rollback_sql(self, expression: exp.Rollback) -> str: 1234 this = self.sql(expression, "this") 1235 this = f" {this}" if this else "" 1236 return f"ROLLBACK TRANSACTION{this}" 1237 1238 def identifier_sql(self, expression: exp.Identifier) -> str: 1239 identifier = super().identifier_sql(expression) 1240 1241 if expression.args.get("global"): 1242 identifier = f"##{identifier}" 1243 elif expression.args.get("temporary"): 1244 identifier = f"#{identifier}" 1245 1246 return identifier 1247 1248 def constraint_sql(self, expression: exp.Constraint) -> str: 1249 this = self.sql(expression, "this") 1250 expressions = self.expressions(expression, flat=True, sep=" ") 1251 return f"CONSTRAINT {this} {expressions}" 1252 1253 def length_sql(self, expression: exp.Length) -> str: 1254 return self._uncast_text(expression, "LEN") 1255 1256 def right_sql(self, expression: exp.Right) -> str: 1257 return self._uncast_text(expression, "RIGHT") 1258 1259 def left_sql(self, expression: exp.Left) -> str: 1260 return self._uncast_text(expression, "LEFT") 1261 1262 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1263 this = expression.this 1264 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1265 this_sql = self.sql(this, "this") 1266 else: 1267 this_sql = self.sql(this) 1268 expression_sql = self.sql(expression, "expression") 1269 return self.func(name, this_sql, expression_sql if expression_sql else None) 1270 1271 def partition_sql(self, expression: exp.Partition) -> str: 1272 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1273 1274 def alter_sql(self, expression: exp.Alter) -> str: 1275 action = seq_get(expression.args.get("actions") or [], 0) 1276 if isinstance(action, exp.AlterRename): 1277 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1278 return super().alter_sql(expression) 1279 1280 def drop_sql(self, expression: exp.Drop) -> str: 1281 if expression.args["kind"] == "VIEW": 1282 expression.this.set("catalog", None) 1283 return super().drop_sql(expression) 1284 1285 def options_modifier(self, expression: exp.Expression) -> str: 1286 options = self.expressions(expression, key="options") 1287 return f" OPTION{self.wrap(options)}" if options else "" 1288 1289 def dpipe_sql(self, expression: exp.DPipe) -> str: 1290 return self.sql( 1291 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1292 ) 1293 1294 def isascii_sql(self, expression: exp.IsAscii) -> str: 1295 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1296 1297 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1298 this = super().columndef_sql(expression, sep) 1299 default = self.sql(expression, "default") 1300 default = f" = {default}" if default else "" 1301 output = self.sql(expression, "output") 1302 output = f" {output}" if output else "" 1303 return f"{this}{default}{output}"
253def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression: 254 """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.""" 255 alias = expression.args.get("alias") 256 257 if ( 258 isinstance(expression, (exp.CTE, exp.Subquery)) 259 and isinstance(alias, exp.TableAlias) 260 and not alias.columns 261 ): 262 from sqlglot.optimizer.qualify_columns import qualify_outputs 263 264 # We keep track of the unaliased column projection indexes instead of the expressions 265 # themselves, because the latter are going to be replaced by new nodes when the aliases 266 # are added and hence we won't be able to reach these newly added Alias parents 267 query = expression.this 268 unaliased_column_indexes = ( 269 i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias 270 ) 271 272 qualify_outputs(query) 273 274 # Preserve the quoting information of columns for newly added Alias nodes 275 query_selects = query.selects 276 for select_index in unaliased_column_indexes: 277 alias = query_selects[select_index] 278 column = alias.this 279 if isinstance(column.this, exp.Identifier): 280 alias.args["alias"].set("quoted", column.this.quoted) 281 282 return expression
Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
385class TSQL(Dialect): 386 SUPPORTS_SEMI_ANTI_JOIN = False 387 LOG_BASE_FIRST = False 388 TYPED_DIVISION = True 389 CONCAT_COALESCE = True 390 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 391 392 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 393 394 TIME_MAPPING = { 395 "year": "%Y", 396 "dayofyear": "%j", 397 "day": "%d", 398 "dy": "%d", 399 "y": "%Y", 400 "week": "%W", 401 "ww": "%W", 402 "wk": "%W", 403 "hour": "%h", 404 "hh": "%I", 405 "minute": "%M", 406 "mi": "%M", 407 "n": "%M", 408 "second": "%S", 409 "ss": "%S", 410 "s": "%-S", 411 "millisecond": "%f", 412 "ms": "%f", 413 "weekday": "%w", 414 "dw": "%w", 415 "month": "%m", 416 "mm": "%M", 417 "m": "%-M", 418 "Y": "%Y", 419 "YYYY": "%Y", 420 "YY": "%y", 421 "MMMM": "%B", 422 "MMM": "%b", 423 "MM": "%m", 424 "M": "%-m", 425 "dddd": "%A", 426 "dd": "%d", 427 "d": "%-d", 428 "HH": "%H", 429 "H": "%-H", 430 "h": "%-I", 431 "ffffff": "%f", 432 "yyyy": "%Y", 433 "yy": "%y", 434 } 435 436 CONVERT_FORMAT_MAPPING = { 437 "0": "%b %d %Y %-I:%M%p", 438 "1": "%m/%d/%y", 439 "2": "%y.%m.%d", 440 "3": "%d/%m/%y", 441 "4": "%d.%m.%y", 442 "5": "%d-%m-%y", 443 "6": "%d %b %y", 444 "7": "%b %d, %y", 445 "8": "%H:%M:%S", 446 "9": "%b %d %Y %-I:%M:%S:%f%p", 447 "10": "mm-dd-yy", 448 "11": "yy/mm/dd", 449 "12": "yymmdd", 450 "13": "%d %b %Y %H:%M:ss:%f", 451 "14": "%H:%M:%S:%f", 452 "20": "%Y-%m-%d %H:%M:%S", 453 "21": "%Y-%m-%d %H:%M:%S.%f", 454 "22": "%m/%d/%y %-I:%M:%S %p", 455 "23": "%Y-%m-%d", 456 "24": "%H:%M:%S", 457 "25": "%Y-%m-%d %H:%M:%S.%f", 458 "100": "%b %d %Y %-I:%M%p", 459 "101": "%m/%d/%Y", 460 "102": "%Y.%m.%d", 461 "103": "%d/%m/%Y", 462 "104": "%d.%m.%Y", 463 "105": "%d-%m-%Y", 464 "106": "%d %b %Y", 465 "107": "%b %d, %Y", 466 "108": "%H:%M:%S", 467 "109": "%b %d %Y %-I:%M:%S:%f%p", 468 "110": "%m-%d-%Y", 469 "111": "%Y/%m/%d", 470 "112": "%Y%m%d", 471 "113": "%d %b %Y %H:%M:%S:%f", 472 "114": "%H:%M:%S:%f", 473 "120": "%Y-%m-%d %H:%M:%S", 474 "121": "%Y-%m-%d %H:%M:%S.%f", 475 } 476 477 FORMAT_TIME_MAPPING = { 478 "y": "%B %Y", 479 "d": "%m/%d/%Y", 480 "H": "%-H", 481 "h": "%-I", 482 "s": "%Y-%m-%d %H:%M:%S", 483 "D": "%A,%B,%Y", 484 "f": "%A,%B,%Y %-I:%M %p", 485 "F": "%A,%B,%Y %-I:%M:%S %p", 486 "g": "%m/%d/%Y %-I:%M %p", 487 "G": "%m/%d/%Y %-I:%M:%S %p", 488 "M": "%B %-d", 489 "m": "%B %-d", 490 "O": "%Y-%m-%dT%H:%M:%S", 491 "u": "%Y-%M-%D %H:%M:%S%z", 492 "U": "%A, %B %D, %Y %H:%M:%S%z", 493 "T": "%-I:%M:%S %p", 494 "t": "%-I:%M", 495 "Y": "%a %Y", 496 } 497 498 class Tokenizer(tokens.Tokenizer): 499 IDENTIFIERS = [("[", "]"), '"'] 500 QUOTES = ["'", '"'] 501 HEX_STRINGS = [("0x", ""), ("0X", "")] 502 VAR_SINGLE_TOKENS = {"@", "$", "#"} 503 504 KEYWORDS = { 505 **tokens.Tokenizer.KEYWORDS, 506 "CLUSTERED INDEX": TokenType.INDEX, 507 "DATETIME2": TokenType.DATETIME2, 508 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 509 "DECLARE": TokenType.DECLARE, 510 "EXEC": TokenType.COMMAND, 511 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 512 "IMAGE": TokenType.IMAGE, 513 "MONEY": TokenType.MONEY, 514 "NONCLUSTERED INDEX": TokenType.INDEX, 515 "NTEXT": TokenType.TEXT, 516 "OPTION": TokenType.OPTION, 517 "OUTPUT": TokenType.RETURNING, 518 "PRINT": TokenType.COMMAND, 519 "PROC": TokenType.PROCEDURE, 520 "REAL": TokenType.FLOAT, 521 "ROWVERSION": TokenType.ROWVERSION, 522 "SMALLDATETIME": TokenType.SMALLDATETIME, 523 "SMALLMONEY": TokenType.SMALLMONEY, 524 "SQL_VARIANT": TokenType.VARIANT, 525 "SYSTEM_USER": TokenType.CURRENT_USER, 526 "TOP": TokenType.TOP, 527 "TIMESTAMP": TokenType.ROWVERSION, 528 "TINYINT": TokenType.UTINYINT, 529 "UNIQUEIDENTIFIER": TokenType.UUID, 530 "UPDATE STATISTICS": TokenType.COMMAND, 531 "XML": TokenType.XML, 532 } 533 KEYWORDS.pop("/*+") 534 535 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 536 537 class Parser(parser.Parser): 538 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 539 LOG_DEFAULTS_TO_LN = True 540 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 541 STRING_ALIASES = True 542 NO_PAREN_IF_COMMANDS = False 543 544 QUERY_MODIFIER_PARSERS = { 545 **parser.Parser.QUERY_MODIFIER_PARSERS, 546 TokenType.OPTION: lambda self: ("options", self._parse_options()), 547 } 548 549 # T-SQL does not allow BEGIN to be used as an identifier 550 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 551 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 552 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 553 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 554 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 555 556 FUNCTIONS = { 557 **parser.Parser.FUNCTIONS, 558 "CHARINDEX": lambda args: exp.StrPosition( 559 this=seq_get(args, 1), 560 substr=seq_get(args, 0), 561 position=seq_get(args, 2), 562 ), 563 "COUNT": lambda args: exp.Count( 564 this=seq_get(args, 0), expressions=args[1:], big_int=False 565 ), 566 "COUNT_BIG": lambda args: exp.Count( 567 this=seq_get(args, 0), expressions=args[1:], big_int=True 568 ), 569 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 570 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 571 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 572 "DATEPART": _build_formatted_time(exp.TimeToStr), 573 "DATETIMEFROMPARTS": _build_datetimefromparts, 574 "EOMONTH": _build_eomonth, 575 "FORMAT": _build_format, 576 "GETDATE": exp.CurrentTimestamp.from_arg_list, 577 "HASHBYTES": _build_hashbytes, 578 "ISNULL": build_coalesce, 579 "JSON_QUERY": _build_json_query, 580 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 581 "LEN": _build_with_arg_as_text(exp.Length), 582 "LEFT": _build_with_arg_as_text(exp.Left), 583 "NEWID": exp.Uuid.from_arg_list, 584 "RIGHT": _build_with_arg_as_text(exp.Right), 585 "PARSENAME": _build_parsename, 586 "REPLICATE": exp.Repeat.from_arg_list, 587 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 588 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 589 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 590 "SUSER_NAME": exp.CurrentUser.from_arg_list, 591 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 592 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 593 "TIMEFROMPARTS": _build_timefromparts, 594 "DATETRUNC": _build_datetrunc, 595 } 596 597 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 598 599 PROCEDURE_OPTIONS = dict.fromkeys( 600 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 601 ) 602 603 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READ_ONLY"} 604 605 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 606 TokenType.TABLE, 607 *parser.Parser.TYPE_TOKENS, 608 } 609 610 STATEMENT_PARSERS = { 611 **parser.Parser.STATEMENT_PARSERS, 612 TokenType.DECLARE: lambda self: self._parse_declare(), 613 } 614 615 RANGE_PARSERS = { 616 **parser.Parser.RANGE_PARSERS, 617 TokenType.DCOLON: lambda self, this: self.expression( 618 exp.ScopeResolution, 619 this=this, 620 expression=self._parse_function() or self._parse_var(any_token=True), 621 ), 622 } 623 624 NO_PAREN_FUNCTION_PARSERS = { 625 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 626 "NEXT": lambda self: self._parse_next_value_for(), 627 } 628 629 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 630 COLUMN_OPERATORS = { 631 **parser.Parser.COLUMN_OPERATORS, 632 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 633 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 634 else self.expression(exp.ScopeResolution, this=this, expression=to), 635 } 636 637 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 638 # We want to use _parse_types() if the first token after :: is a known type, 639 # otherwise we could parse something like x::varchar(max) into a function 640 if self._match_set(self.TYPE_TOKENS, advance=False): 641 return self._parse_types() 642 643 return self._parse_function() or self._parse_types() 644 645 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 646 if not self._match(TokenType.OPTION): 647 return None 648 649 def _parse_option() -> t.Optional[exp.Expression]: 650 option = self._parse_var_from_options(OPTIONS) 651 if not option: 652 return None 653 654 self._match(TokenType.EQ) 655 return self.expression( 656 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 657 ) 658 659 return self._parse_wrapped_csv(_parse_option) 660 661 def _parse_projections(self) -> t.List[exp.Expression]: 662 """ 663 T-SQL supports the syntax alias = expression in the SELECT's projection list, 664 so we transform all parsed Selects to convert their EQ projections into Aliases. 665 666 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 667 """ 668 return [ 669 ( 670 exp.alias_(projection.expression, projection.this.this, copy=False) 671 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 672 else projection 673 ) 674 for projection in super()._parse_projections() 675 ] 676 677 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 678 """Applies to SQL Server and Azure SQL Database 679 COMMIT [ { TRAN | TRANSACTION } 680 [ transaction_name | @tran_name_variable ] ] 681 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 682 683 ROLLBACK { TRAN | TRANSACTION } 684 [ transaction_name | @tran_name_variable 685 | savepoint_name | @savepoint_variable ] 686 """ 687 rollback = self._prev.token_type == TokenType.ROLLBACK 688 689 self._match_texts(("TRAN", "TRANSACTION")) 690 this = self._parse_id_var() 691 692 if rollback: 693 return self.expression(exp.Rollback, this=this) 694 695 durability = None 696 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 697 self._match_text_seq("DELAYED_DURABILITY") 698 self._match(TokenType.EQ) 699 700 if self._match_text_seq("OFF"): 701 durability = False 702 else: 703 self._match(TokenType.ON) 704 durability = True 705 706 self._match_r_paren() 707 708 return self.expression(exp.Commit, this=this, durability=durability) 709 710 def _parse_transaction(self) -> exp.Transaction | exp.Command: 711 """Applies to SQL Server and Azure SQL Database 712 BEGIN { TRAN | TRANSACTION } 713 [ { transaction_name | @tran_name_variable } 714 [ WITH MARK [ 'description' ] ] 715 ] 716 """ 717 if self._match_texts(("TRAN", "TRANSACTION")): 718 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 719 if self._match_text_seq("WITH", "MARK"): 720 transaction.set("mark", self._parse_string()) 721 722 return transaction 723 724 return self._parse_as_command(self._prev) 725 726 def _parse_returns(self) -> exp.ReturnsProperty: 727 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 728 returns = super()._parse_returns() 729 returns.set("table", table) 730 return returns 731 732 def _parse_convert( 733 self, strict: bool, safe: t.Optional[bool] = None 734 ) -> t.Optional[exp.Expression]: 735 this = self._parse_types() 736 self._match(TokenType.COMMA) 737 args = [this, *self._parse_csv(self._parse_assignment)] 738 convert = exp.Convert.from_arg_list(args) 739 convert.set("safe", safe) 740 convert.set("strict", strict) 741 return convert 742 743 def _parse_column_def( 744 self, this: t.Optional[exp.Expression], computed_column: bool = True 745 ) -> t.Optional[exp.Expression]: 746 this = super()._parse_column_def(this=this, computed_column=computed_column) 747 if not this: 748 return None 749 if self._match(TokenType.EQ): 750 this.set("default", self._parse_disjunction()) 751 if self._match_texts(self.COLUMN_DEFINITION_MODES): 752 this.set("output", self._prev.text) 753 return this 754 755 def _parse_user_defined_function( 756 self, kind: t.Optional[TokenType] = None 757 ) -> t.Optional[exp.Expression]: 758 this = super()._parse_user_defined_function(kind=kind) 759 760 if ( 761 kind == TokenType.FUNCTION 762 or isinstance(this, exp.UserDefinedFunction) 763 or self._match(TokenType.ALIAS, advance=False) 764 ): 765 return this 766 767 if not self._match(TokenType.WITH, advance=False): 768 expressions = self._parse_csv(self._parse_function_parameter) 769 else: 770 expressions = None 771 772 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 773 774 def _parse_id_var( 775 self, 776 any_token: bool = True, 777 tokens: t.Optional[t.Collection[TokenType]] = None, 778 ) -> t.Optional[exp.Expression]: 779 is_temporary = self._match(TokenType.HASH) 780 is_global = is_temporary and self._match(TokenType.HASH) 781 782 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 783 if this: 784 if is_global: 785 this.set("global", True) 786 elif is_temporary: 787 this.set("temporary", True) 788 789 return this 790 791 def _parse_create(self) -> exp.Create | exp.Command: 792 create = super()._parse_create() 793 794 if isinstance(create, exp.Create): 795 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 796 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 797 if not create.args.get("properties"): 798 create.set("properties", exp.Properties(expressions=[])) 799 800 create.args["properties"].append("expressions", exp.TemporaryProperty()) 801 802 return create 803 804 def _parse_if(self) -> t.Optional[exp.Expression]: 805 index = self._index 806 807 if self._match_text_seq("OBJECT_ID"): 808 self._parse_wrapped_csv(self._parse_string) 809 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 810 return self._parse_drop(exists=True) 811 self._retreat(index) 812 813 return super()._parse_if() 814 815 def _parse_unique(self) -> exp.UniqueColumnConstraint: 816 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 817 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 818 else: 819 this = self._parse_schema(self._parse_id_var(any_token=False)) 820 821 return self.expression(exp.UniqueColumnConstraint, this=this) 822 823 def _parse_partition(self) -> t.Optional[exp.Partition]: 824 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 825 return None 826 827 def parse_range(): 828 low = self._parse_bitwise() 829 high = self._parse_bitwise() if self._match_text_seq("TO") else None 830 831 return ( 832 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 833 ) 834 835 partition = self.expression( 836 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 837 ) 838 839 self._match_r_paren() 840 841 return partition 842 843 def _parse_declare(self) -> exp.Declare | exp.Command: 844 index = self._index 845 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 846 847 if not expressions or self._curr: 848 self._retreat(index) 849 return self._parse_as_command(self._prev) 850 851 return self.expression(exp.Declare, expressions=expressions) 852 853 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 854 var = self._parse_id_var() 855 if not var: 856 return None 857 858 value = None 859 self._match(TokenType.ALIAS) 860 if self._match(TokenType.TABLE): 861 data_type = self._parse_schema() 862 else: 863 data_type = self._parse_types() 864 if self._match(TokenType.EQ): 865 value = self._parse_bitwise() 866 867 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 868 869 class Generator(generator.Generator): 870 LIMIT_IS_TOP = True 871 QUERY_HINTS = False 872 RETURNING_END = False 873 NVL2_SUPPORTED = False 874 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 875 LIMIT_FETCH = "FETCH" 876 COMPUTED_COLUMN_WITH_TYPE = False 877 CTE_RECURSIVE_KEYWORD_REQUIRED = False 878 ENSURE_BOOLS = True 879 NULL_ORDERING_SUPPORTED = None 880 SUPPORTS_SINGLE_ARG_CONCAT = False 881 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 882 SUPPORTS_SELECT_INTO = True 883 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 884 SUPPORTS_TO_NUMBER = False 885 SET_OP_MODIFIERS = False 886 COPY_PARAMS_EQ_REQUIRED = True 887 PARSE_JSON_NAME = None 888 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 889 890 EXPRESSIONS_WITHOUT_NESTED_CTES = { 891 exp.Create, 892 exp.Delete, 893 exp.Insert, 894 exp.Intersect, 895 exp.Except, 896 exp.Merge, 897 exp.Select, 898 exp.Subquery, 899 exp.Union, 900 exp.Update, 901 } 902 903 SUPPORTED_JSON_PATH_PARTS = { 904 exp.JSONPathKey, 905 exp.JSONPathRoot, 906 exp.JSONPathSubscript, 907 } 908 909 TYPE_MAPPING = { 910 **generator.Generator.TYPE_MAPPING, 911 exp.DataType.Type.BOOLEAN: "BIT", 912 exp.DataType.Type.DATETIME2: "DATETIME2", 913 exp.DataType.Type.DECIMAL: "NUMERIC", 914 exp.DataType.Type.DOUBLE: "FLOAT", 915 exp.DataType.Type.INT: "INTEGER", 916 exp.DataType.Type.ROWVERSION: "ROWVERSION", 917 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 918 exp.DataType.Type.TIMESTAMP: "DATETIME2", 919 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 920 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 921 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 922 exp.DataType.Type.UTINYINT: "TINYINT", 923 exp.DataType.Type.VARIANT: "SQL_VARIANT", 924 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 925 } 926 927 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 928 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 929 930 TRANSFORMS = { 931 **generator.Generator.TRANSFORMS, 932 exp.AnyValue: any_value_to_max_sql, 933 exp.ArrayToString: rename_func("STRING_AGG"), 934 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 935 exp.Chr: rename_func("CHAR"), 936 exp.DateAdd: date_delta_sql("DATEADD"), 937 exp.DateDiff: date_delta_sql("DATEDIFF"), 938 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 939 exp.CurrentDate: rename_func("GETDATE"), 940 exp.CurrentTimestamp: rename_func("GETDATE"), 941 exp.DateStrToDate: datestrtodate_sql, 942 exp.Extract: rename_func("DATEPART"), 943 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 944 exp.GroupConcat: _string_agg_sql, 945 exp.If: rename_func("IIF"), 946 exp.JSONExtract: _json_extract_sql, 947 exp.JSONExtractScalar: _json_extract_sql, 948 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 949 exp.Ln: rename_func("LOG"), 950 exp.Max: max_or_greatest, 951 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 952 exp.Min: min_or_least, 953 exp.NumberToStr: _format_sql, 954 exp.Repeat: rename_func("REPLICATE"), 955 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 956 exp.Select: transforms.preprocess( 957 [ 958 transforms.eliminate_distinct_on, 959 transforms.eliminate_semi_and_anti_joins, 960 transforms.eliminate_qualify, 961 transforms.unnest_generate_date_array_using_recursive_cte, 962 ] 963 ), 964 exp.Stddev: rename_func("STDEV"), 965 exp.StrPosition: lambda self, e: strposition_sql( 966 self, e, func_name="CHARINDEX", supports_position=True 967 ), 968 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 969 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 970 exp.SHA2: lambda self, e: self.func( 971 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 972 ), 973 exp.TemporaryProperty: lambda self, e: "", 974 exp.TimeStrToTime: _timestrtotime_sql, 975 exp.TimeToStr: _format_sql, 976 exp.Trim: trim_sql, 977 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 978 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 979 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 980 exp.Uuid: lambda *_: "NEWID()", 981 exp.DateFromParts: rename_func("DATEFROMPARTS"), 982 } 983 984 TRANSFORMS.pop(exp.ReturnsProperty) 985 986 PROPERTIES_LOCATION = { 987 **generator.Generator.PROPERTIES_LOCATION, 988 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 989 } 990 991 def scope_resolution(self, rhs: str, scope_name: str) -> str: 992 return f"{scope_name}::{rhs}" 993 994 def select_sql(self, expression: exp.Select) -> str: 995 limit = expression.args.get("limit") 996 offset = expression.args.get("offset") 997 998 if isinstance(limit, exp.Fetch) and not offset: 999 # Dialects like Oracle can FETCH directly from a row set but 1000 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1001 offset = exp.Offset(expression=exp.Literal.number(0)) 1002 expression.set("offset", offset) 1003 1004 if offset: 1005 if not expression.args.get("order"): 1006 # ORDER BY is required in order to use OFFSET in a query, so we use 1007 # a noop order by, since we don't really care about the order. 1008 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1009 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1010 1011 if isinstance(limit, exp.Limit): 1012 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1013 # we replace here because otherwise TOP would be generated in select_sql 1014 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1015 1016 return super().select_sql(expression) 1017 1018 def convert_sql(self, expression: exp.Convert) -> str: 1019 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1020 return self.func( 1021 name, expression.this, expression.expression, expression.args.get("style") 1022 ) 1023 1024 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1025 option = self.sql(expression, "this") 1026 value = self.sql(expression, "expression") 1027 if value: 1028 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1029 return f"{option} {optional_equal_sign}{value}" 1030 return option 1031 1032 def lateral_op(self, expression: exp.Lateral) -> str: 1033 cross_apply = expression.args.get("cross_apply") 1034 if cross_apply is True: 1035 return "CROSS APPLY" 1036 if cross_apply is False: 1037 return "OUTER APPLY" 1038 1039 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1040 self.unsupported("LATERAL clause is not supported.") 1041 return "LATERAL" 1042 1043 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1044 this = expression.this 1045 split_count = len(this.name.split(".")) 1046 delimiter = expression.args.get("delimiter") 1047 part_index = expression.args.get("part_index") 1048 1049 if ( 1050 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1051 or (delimiter and delimiter.name != ".") 1052 or not part_index 1053 or split_count > 4 1054 ): 1055 self.unsupported( 1056 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1057 ) 1058 return "" 1059 1060 return self.func( 1061 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1062 ) 1063 1064 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1065 nano = expression.args.get("nano") 1066 if nano is not None: 1067 nano.pop() 1068 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1069 1070 if expression.args.get("fractions") is None: 1071 expression.set("fractions", exp.Literal.number(0)) 1072 if expression.args.get("precision") is None: 1073 expression.set("precision", exp.Literal.number(0)) 1074 1075 return rename_func("TIMEFROMPARTS")(self, expression) 1076 1077 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1078 zone = expression.args.get("zone") 1079 if zone is not None: 1080 zone.pop() 1081 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1082 1083 nano = expression.args.get("nano") 1084 if nano is not None: 1085 nano.pop() 1086 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1087 1088 if expression.args.get("milli") is None: 1089 expression.set("milli", exp.Literal.number(0)) 1090 1091 return rename_func("DATETIMEFROMPARTS")(self, expression) 1092 1093 def setitem_sql(self, expression: exp.SetItem) -> str: 1094 this = expression.this 1095 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1096 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1097 return f"{self.sql(this.left)} {self.sql(this.right)}" 1098 1099 return super().setitem_sql(expression) 1100 1101 def boolean_sql(self, expression: exp.Boolean) -> str: 1102 if type(expression.parent) in BIT_TYPES or isinstance( 1103 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1104 ): 1105 return "1" if expression.this else "0" 1106 1107 return "(1 = 1)" if expression.this else "(1 = 0)" 1108 1109 def is_sql(self, expression: exp.Is) -> str: 1110 if isinstance(expression.expression, exp.Boolean): 1111 return self.binary(expression, "=") 1112 return self.binary(expression, "IS") 1113 1114 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1115 sql = self.sql(expression, "this") 1116 properties = expression.args.get("properties") 1117 1118 if sql[:1] != "#" and any( 1119 isinstance(prop, exp.TemporaryProperty) 1120 for prop in (properties.expressions if properties else []) 1121 ): 1122 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1123 1124 return sql 1125 1126 def create_sql(self, expression: exp.Create) -> str: 1127 kind = expression.kind 1128 exists = expression.args.pop("exists", None) 1129 1130 like_property = expression.find(exp.LikeProperty) 1131 if like_property: 1132 ctas_expression = like_property.this 1133 else: 1134 ctas_expression = expression.expression 1135 1136 if kind == "VIEW": 1137 expression.this.set("catalog", None) 1138 with_ = expression.args.get("with") 1139 if ctas_expression and with_: 1140 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1141 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1142 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1143 ctas_expression.set("with", with_.pop()) 1144 1145 sql = super().create_sql(expression) 1146 1147 table = expression.find(exp.Table) 1148 1149 # Convert CTAS statement to SELECT .. INTO .. 1150 if kind == "TABLE" and ctas_expression: 1151 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1152 ctas_expression = ctas_expression.subquery() 1153 1154 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1155 select_into.set("into", exp.Into(this=table)) 1156 1157 if like_property: 1158 select_into.limit(0, copy=False) 1159 1160 sql = self.sql(select_into) 1161 1162 if exists: 1163 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1164 sql_with_ctes = self.prepend_ctes(expression, sql) 1165 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1166 if kind == "SCHEMA": 1167 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1168 elif kind == "TABLE": 1169 assert table 1170 where = exp.and_( 1171 exp.column("table_name").eq(table.name), 1172 exp.column("table_schema").eq(table.db) if table.db else None, 1173 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1174 ) 1175 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1176 elif kind == "INDEX": 1177 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1178 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1179 elif expression.args.get("replace"): 1180 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1181 1182 return self.prepend_ctes(expression, sql) 1183 1184 def count_sql(self, expression: exp.Count) -> str: 1185 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1186 return rename_func(func_name)(self, expression) 1187 1188 def offset_sql(self, expression: exp.Offset) -> str: 1189 return f"{super().offset_sql(expression)} ROWS" 1190 1191 def version_sql(self, expression: exp.Version) -> str: 1192 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1193 this = f"FOR {name}" 1194 expr = expression.expression 1195 kind = expression.text("kind") 1196 if kind in ("FROM", "BETWEEN"): 1197 args = expr.expressions 1198 sep = "TO" if kind == "FROM" else "AND" 1199 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1200 else: 1201 expr_sql = self.sql(expr) 1202 1203 expr_sql = f" {expr_sql}" if expr_sql else "" 1204 return f"{this} {kind}{expr_sql}" 1205 1206 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1207 table = expression.args.get("table") 1208 table = f"{table} " if table else "" 1209 return f"RETURNS {table}{self.sql(expression, 'this')}" 1210 1211 def returning_sql(self, expression: exp.Returning) -> str: 1212 into = self.sql(expression, "into") 1213 into = self.seg(f"INTO {into}") if into else "" 1214 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1215 1216 def transaction_sql(self, expression: exp.Transaction) -> str: 1217 this = self.sql(expression, "this") 1218 this = f" {this}" if this else "" 1219 mark = self.sql(expression, "mark") 1220 mark = f" WITH MARK {mark}" if mark else "" 1221 return f"BEGIN TRANSACTION{this}{mark}" 1222 1223 def commit_sql(self, expression: exp.Commit) -> str: 1224 this = self.sql(expression, "this") 1225 this = f" {this}" if this else "" 1226 durability = expression.args.get("durability") 1227 durability = ( 1228 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1229 if durability is not None 1230 else "" 1231 ) 1232 return f"COMMIT TRANSACTION{this}{durability}" 1233 1234 def rollback_sql(self, expression: exp.Rollback) -> str: 1235 this = self.sql(expression, "this") 1236 this = f" {this}" if this else "" 1237 return f"ROLLBACK TRANSACTION{this}" 1238 1239 def identifier_sql(self, expression: exp.Identifier) -> str: 1240 identifier = super().identifier_sql(expression) 1241 1242 if expression.args.get("global"): 1243 identifier = f"##{identifier}" 1244 elif expression.args.get("temporary"): 1245 identifier = f"#{identifier}" 1246 1247 return identifier 1248 1249 def constraint_sql(self, expression: exp.Constraint) -> str: 1250 this = self.sql(expression, "this") 1251 expressions = self.expressions(expression, flat=True, sep=" ") 1252 return f"CONSTRAINT {this} {expressions}" 1253 1254 def length_sql(self, expression: exp.Length) -> str: 1255 return self._uncast_text(expression, "LEN") 1256 1257 def right_sql(self, expression: exp.Right) -> str: 1258 return self._uncast_text(expression, "RIGHT") 1259 1260 def left_sql(self, expression: exp.Left) -> str: 1261 return self._uncast_text(expression, "LEFT") 1262 1263 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1264 this = expression.this 1265 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1266 this_sql = self.sql(this, "this") 1267 else: 1268 this_sql = self.sql(this) 1269 expression_sql = self.sql(expression, "expression") 1270 return self.func(name, this_sql, expression_sql if expression_sql else None) 1271 1272 def partition_sql(self, expression: exp.Partition) -> str: 1273 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1274 1275 def alter_sql(self, expression: exp.Alter) -> str: 1276 action = seq_get(expression.args.get("actions") or [], 0) 1277 if isinstance(action, exp.AlterRename): 1278 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1279 return super().alter_sql(expression) 1280 1281 def drop_sql(self, expression: exp.Drop) -> str: 1282 if expression.args["kind"] == "VIEW": 1283 expression.this.set("catalog", None) 1284 return super().drop_sql(expression) 1285 1286 def options_modifier(self, expression: exp.Expression) -> str: 1287 options = self.expressions(expression, key="options") 1288 return f" OPTION{self.wrap(options)}" if options else "" 1289 1290 def dpipe_sql(self, expression: exp.DPipe) -> str: 1291 return self.sql( 1292 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1293 ) 1294 1295 def isascii_sql(self, expression: exp.IsAscii) -> str: 1296 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1297 1298 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1299 this = super().columndef_sql(expression, sep) 1300 default = self.sql(expression, "default") 1301 default = f" = {default}" if default else "" 1302 output = self.sql(expression, "output") 1303 output = f" {output}" if output else "" 1304 return f"{this}{default}{output}"
Whether the base comes first in the LOG
function.
Possible values: True
, False
, None
(two arguments are not supported by LOG
)
Whether the behavior of a / b
depends on the types of a
and b
.
False means a / b
is always float division.
True means a / b
is integer division if both a
and b
are integers.
A NULL
arg in CONCAT
yields NULL
by default, but in some dialects it yields an empty string.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python strftime
formats.
498 class Tokenizer(tokens.Tokenizer): 499 IDENTIFIERS = [("[", "]"), '"'] 500 QUOTES = ["'", '"'] 501 HEX_STRINGS = [("0x", ""), ("0X", "")] 502 VAR_SINGLE_TOKENS = {"@", "$", "#"} 503 504 KEYWORDS = { 505 **tokens.Tokenizer.KEYWORDS, 506 "CLUSTERED INDEX": TokenType.INDEX, 507 "DATETIME2": TokenType.DATETIME2, 508 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 509 "DECLARE": TokenType.DECLARE, 510 "EXEC": TokenType.COMMAND, 511 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 512 "IMAGE": TokenType.IMAGE, 513 "MONEY": TokenType.MONEY, 514 "NONCLUSTERED INDEX": TokenType.INDEX, 515 "NTEXT": TokenType.TEXT, 516 "OPTION": TokenType.OPTION, 517 "OUTPUT": TokenType.RETURNING, 518 "PRINT": TokenType.COMMAND, 519 "PROC": TokenType.PROCEDURE, 520 "REAL": TokenType.FLOAT, 521 "ROWVERSION": TokenType.ROWVERSION, 522 "SMALLDATETIME": TokenType.SMALLDATETIME, 523 "SMALLMONEY": TokenType.SMALLMONEY, 524 "SQL_VARIANT": TokenType.VARIANT, 525 "SYSTEM_USER": TokenType.CURRENT_USER, 526 "TOP": TokenType.TOP, 527 "TIMESTAMP": TokenType.ROWVERSION, 528 "TINYINT": TokenType.UTINYINT, 529 "UNIQUEIDENTIFIER": TokenType.UUID, 530 "UPDATE STATISTICS": TokenType.COMMAND, 531 "XML": TokenType.XML, 532 } 533 KEYWORDS.pop("/*+") 534 535 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- use_rs_tokenizer
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
537 class Parser(parser.Parser): 538 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 539 LOG_DEFAULTS_TO_LN = True 540 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 541 STRING_ALIASES = True 542 NO_PAREN_IF_COMMANDS = False 543 544 QUERY_MODIFIER_PARSERS = { 545 **parser.Parser.QUERY_MODIFIER_PARSERS, 546 TokenType.OPTION: lambda self: ("options", self._parse_options()), 547 } 548 549 # T-SQL does not allow BEGIN to be used as an identifier 550 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 551 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 552 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 553 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 554 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN} 555 556 FUNCTIONS = { 557 **parser.Parser.FUNCTIONS, 558 "CHARINDEX": lambda args: exp.StrPosition( 559 this=seq_get(args, 1), 560 substr=seq_get(args, 0), 561 position=seq_get(args, 2), 562 ), 563 "COUNT": lambda args: exp.Count( 564 this=seq_get(args, 0), expressions=args[1:], big_int=False 565 ), 566 "COUNT_BIG": lambda args: exp.Count( 567 this=seq_get(args, 0), expressions=args[1:], big_int=True 568 ), 569 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 570 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 571 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 572 "DATEPART": _build_formatted_time(exp.TimeToStr), 573 "DATETIMEFROMPARTS": _build_datetimefromparts, 574 "EOMONTH": _build_eomonth, 575 "FORMAT": _build_format, 576 "GETDATE": exp.CurrentTimestamp.from_arg_list, 577 "HASHBYTES": _build_hashbytes, 578 "ISNULL": build_coalesce, 579 "JSON_QUERY": _build_json_query, 580 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 581 "LEN": _build_with_arg_as_text(exp.Length), 582 "LEFT": _build_with_arg_as_text(exp.Left), 583 "NEWID": exp.Uuid.from_arg_list, 584 "RIGHT": _build_with_arg_as_text(exp.Right), 585 "PARSENAME": _build_parsename, 586 "REPLICATE": exp.Repeat.from_arg_list, 587 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 588 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 589 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 590 "SUSER_NAME": exp.CurrentUser.from_arg_list, 591 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 592 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 593 "TIMEFROMPARTS": _build_timefromparts, 594 "DATETRUNC": _build_datetrunc, 595 } 596 597 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 598 599 PROCEDURE_OPTIONS = dict.fromkeys( 600 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 601 ) 602 603 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READ_ONLY"} 604 605 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 606 TokenType.TABLE, 607 *parser.Parser.TYPE_TOKENS, 608 } 609 610 STATEMENT_PARSERS = { 611 **parser.Parser.STATEMENT_PARSERS, 612 TokenType.DECLARE: lambda self: self._parse_declare(), 613 } 614 615 RANGE_PARSERS = { 616 **parser.Parser.RANGE_PARSERS, 617 TokenType.DCOLON: lambda self, this: self.expression( 618 exp.ScopeResolution, 619 this=this, 620 expression=self._parse_function() or self._parse_var(any_token=True), 621 ), 622 } 623 624 NO_PAREN_FUNCTION_PARSERS = { 625 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 626 "NEXT": lambda self: self._parse_next_value_for(), 627 } 628 629 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 630 COLUMN_OPERATORS = { 631 **parser.Parser.COLUMN_OPERATORS, 632 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 633 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 634 else self.expression(exp.ScopeResolution, this=this, expression=to), 635 } 636 637 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 638 # We want to use _parse_types() if the first token after :: is a known type, 639 # otherwise we could parse something like x::varchar(max) into a function 640 if self._match_set(self.TYPE_TOKENS, advance=False): 641 return self._parse_types() 642 643 return self._parse_function() or self._parse_types() 644 645 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 646 if not self._match(TokenType.OPTION): 647 return None 648 649 def _parse_option() -> t.Optional[exp.Expression]: 650 option = self._parse_var_from_options(OPTIONS) 651 if not option: 652 return None 653 654 self._match(TokenType.EQ) 655 return self.expression( 656 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 657 ) 658 659 return self._parse_wrapped_csv(_parse_option) 660 661 def _parse_projections(self) -> t.List[exp.Expression]: 662 """ 663 T-SQL supports the syntax alias = expression in the SELECT's projection list, 664 so we transform all parsed Selects to convert their EQ projections into Aliases. 665 666 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 667 """ 668 return [ 669 ( 670 exp.alias_(projection.expression, projection.this.this, copy=False) 671 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 672 else projection 673 ) 674 for projection in super()._parse_projections() 675 ] 676 677 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 678 """Applies to SQL Server and Azure SQL Database 679 COMMIT [ { TRAN | TRANSACTION } 680 [ transaction_name | @tran_name_variable ] ] 681 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 682 683 ROLLBACK { TRAN | TRANSACTION } 684 [ transaction_name | @tran_name_variable 685 | savepoint_name | @savepoint_variable ] 686 """ 687 rollback = self._prev.token_type == TokenType.ROLLBACK 688 689 self._match_texts(("TRAN", "TRANSACTION")) 690 this = self._parse_id_var() 691 692 if rollback: 693 return self.expression(exp.Rollback, this=this) 694 695 durability = None 696 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 697 self._match_text_seq("DELAYED_DURABILITY") 698 self._match(TokenType.EQ) 699 700 if self._match_text_seq("OFF"): 701 durability = False 702 else: 703 self._match(TokenType.ON) 704 durability = True 705 706 self._match_r_paren() 707 708 return self.expression(exp.Commit, this=this, durability=durability) 709 710 def _parse_transaction(self) -> exp.Transaction | exp.Command: 711 """Applies to SQL Server and Azure SQL Database 712 BEGIN { TRAN | TRANSACTION } 713 [ { transaction_name | @tran_name_variable } 714 [ WITH MARK [ 'description' ] ] 715 ] 716 """ 717 if self._match_texts(("TRAN", "TRANSACTION")): 718 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 719 if self._match_text_seq("WITH", "MARK"): 720 transaction.set("mark", self._parse_string()) 721 722 return transaction 723 724 return self._parse_as_command(self._prev) 725 726 def _parse_returns(self) -> exp.ReturnsProperty: 727 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 728 returns = super()._parse_returns() 729 returns.set("table", table) 730 return returns 731 732 def _parse_convert( 733 self, strict: bool, safe: t.Optional[bool] = None 734 ) -> t.Optional[exp.Expression]: 735 this = self._parse_types() 736 self._match(TokenType.COMMA) 737 args = [this, *self._parse_csv(self._parse_assignment)] 738 convert = exp.Convert.from_arg_list(args) 739 convert.set("safe", safe) 740 convert.set("strict", strict) 741 return convert 742 743 def _parse_column_def( 744 self, this: t.Optional[exp.Expression], computed_column: bool = True 745 ) -> t.Optional[exp.Expression]: 746 this = super()._parse_column_def(this=this, computed_column=computed_column) 747 if not this: 748 return None 749 if self._match(TokenType.EQ): 750 this.set("default", self._parse_disjunction()) 751 if self._match_texts(self.COLUMN_DEFINITION_MODES): 752 this.set("output", self._prev.text) 753 return this 754 755 def _parse_user_defined_function( 756 self, kind: t.Optional[TokenType] = None 757 ) -> t.Optional[exp.Expression]: 758 this = super()._parse_user_defined_function(kind=kind) 759 760 if ( 761 kind == TokenType.FUNCTION 762 or isinstance(this, exp.UserDefinedFunction) 763 or self._match(TokenType.ALIAS, advance=False) 764 ): 765 return this 766 767 if not self._match(TokenType.WITH, advance=False): 768 expressions = self._parse_csv(self._parse_function_parameter) 769 else: 770 expressions = None 771 772 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 773 774 def _parse_id_var( 775 self, 776 any_token: bool = True, 777 tokens: t.Optional[t.Collection[TokenType]] = None, 778 ) -> t.Optional[exp.Expression]: 779 is_temporary = self._match(TokenType.HASH) 780 is_global = is_temporary and self._match(TokenType.HASH) 781 782 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 783 if this: 784 if is_global: 785 this.set("global", True) 786 elif is_temporary: 787 this.set("temporary", True) 788 789 return this 790 791 def _parse_create(self) -> exp.Create | exp.Command: 792 create = super()._parse_create() 793 794 if isinstance(create, exp.Create): 795 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 796 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 797 if not create.args.get("properties"): 798 create.set("properties", exp.Properties(expressions=[])) 799 800 create.args["properties"].append("expressions", exp.TemporaryProperty()) 801 802 return create 803 804 def _parse_if(self) -> t.Optional[exp.Expression]: 805 index = self._index 806 807 if self._match_text_seq("OBJECT_ID"): 808 self._parse_wrapped_csv(self._parse_string) 809 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 810 return self._parse_drop(exists=True) 811 self._retreat(index) 812 813 return super()._parse_if() 814 815 def _parse_unique(self) -> exp.UniqueColumnConstraint: 816 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 817 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 818 else: 819 this = self._parse_schema(self._parse_id_var(any_token=False)) 820 821 return self.expression(exp.UniqueColumnConstraint, this=this) 822 823 def _parse_partition(self) -> t.Optional[exp.Partition]: 824 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 825 return None 826 827 def parse_range(): 828 low = self._parse_bitwise() 829 high = self._parse_bitwise() if self._match_text_seq("TO") else None 830 831 return ( 832 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 833 ) 834 835 partition = self.expression( 836 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 837 ) 838 839 self._match_r_paren() 840 841 return partition 842 843 def _parse_declare(self) -> exp.Declare | exp.Command: 844 index = self._index 845 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 846 847 if not expressions or self._curr: 848 self._retreat(index) 849 return self._parse_as_command(self._prev) 850 851 return self.expression(exp.Declare, expressions=expressions) 852 853 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 854 var = self._parse_id_var() 855 if not var: 856 return None 857 858 value = None 859 self._match(TokenType.ALIAS) 860 if self._match(TokenType.TABLE): 861 data_type = self._parse_schema() 862 else: 863 data_type = self._parse_types() 864 if self._match(TokenType.EQ): 865 value = self._parse_bitwise() 866 867 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- ARRAY_CONSTRUCTORS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- FUNCTION_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- NULL_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ODBC_DATETIME_LITERALS
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- ANALYZE_STYLES
- ANALYZE_EXPRESSION_PARSERS
- PARTITION_KEYWORDS
- AMBIGUOUS_ALIAS_TOKENS
- OPERATION_MODIFIERS
- RECURSIVE_CTE_SEARCH_KIND
- MODIFIABLES
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- SET_OP_MODIFIERS
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- OPTIONAL_ALIAS_TOKEN_CTE
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- errors
- sql
869 class Generator(generator.Generator): 870 LIMIT_IS_TOP = True 871 QUERY_HINTS = False 872 RETURNING_END = False 873 NVL2_SUPPORTED = False 874 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 875 LIMIT_FETCH = "FETCH" 876 COMPUTED_COLUMN_WITH_TYPE = False 877 CTE_RECURSIVE_KEYWORD_REQUIRED = False 878 ENSURE_BOOLS = True 879 NULL_ORDERING_SUPPORTED = None 880 SUPPORTS_SINGLE_ARG_CONCAT = False 881 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 882 SUPPORTS_SELECT_INTO = True 883 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 884 SUPPORTS_TO_NUMBER = False 885 SET_OP_MODIFIERS = False 886 COPY_PARAMS_EQ_REQUIRED = True 887 PARSE_JSON_NAME = None 888 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 889 890 EXPRESSIONS_WITHOUT_NESTED_CTES = { 891 exp.Create, 892 exp.Delete, 893 exp.Insert, 894 exp.Intersect, 895 exp.Except, 896 exp.Merge, 897 exp.Select, 898 exp.Subquery, 899 exp.Union, 900 exp.Update, 901 } 902 903 SUPPORTED_JSON_PATH_PARTS = { 904 exp.JSONPathKey, 905 exp.JSONPathRoot, 906 exp.JSONPathSubscript, 907 } 908 909 TYPE_MAPPING = { 910 **generator.Generator.TYPE_MAPPING, 911 exp.DataType.Type.BOOLEAN: "BIT", 912 exp.DataType.Type.DATETIME2: "DATETIME2", 913 exp.DataType.Type.DECIMAL: "NUMERIC", 914 exp.DataType.Type.DOUBLE: "FLOAT", 915 exp.DataType.Type.INT: "INTEGER", 916 exp.DataType.Type.ROWVERSION: "ROWVERSION", 917 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 918 exp.DataType.Type.TIMESTAMP: "DATETIME2", 919 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 920 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 921 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 922 exp.DataType.Type.UTINYINT: "TINYINT", 923 exp.DataType.Type.VARIANT: "SQL_VARIANT", 924 exp.DataType.Type.UUID: "UNIQUEIDENTIFIER", 925 } 926 927 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 928 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 929 930 TRANSFORMS = { 931 **generator.Generator.TRANSFORMS, 932 exp.AnyValue: any_value_to_max_sql, 933 exp.ArrayToString: rename_func("STRING_AGG"), 934 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 935 exp.Chr: rename_func("CHAR"), 936 exp.DateAdd: date_delta_sql("DATEADD"), 937 exp.DateDiff: date_delta_sql("DATEDIFF"), 938 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 939 exp.CurrentDate: rename_func("GETDATE"), 940 exp.CurrentTimestamp: rename_func("GETDATE"), 941 exp.DateStrToDate: datestrtodate_sql, 942 exp.Extract: rename_func("DATEPART"), 943 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 944 exp.GroupConcat: _string_agg_sql, 945 exp.If: rename_func("IIF"), 946 exp.JSONExtract: _json_extract_sql, 947 exp.JSONExtractScalar: _json_extract_sql, 948 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 949 exp.Ln: rename_func("LOG"), 950 exp.Max: max_or_greatest, 951 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 952 exp.Min: min_or_least, 953 exp.NumberToStr: _format_sql, 954 exp.Repeat: rename_func("REPLICATE"), 955 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 956 exp.Select: transforms.preprocess( 957 [ 958 transforms.eliminate_distinct_on, 959 transforms.eliminate_semi_and_anti_joins, 960 transforms.eliminate_qualify, 961 transforms.unnest_generate_date_array_using_recursive_cte, 962 ] 963 ), 964 exp.Stddev: rename_func("STDEV"), 965 exp.StrPosition: lambda self, e: strposition_sql( 966 self, e, func_name="CHARINDEX", supports_position=True 967 ), 968 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 969 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 970 exp.SHA2: lambda self, e: self.func( 971 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 972 ), 973 exp.TemporaryProperty: lambda self, e: "", 974 exp.TimeStrToTime: _timestrtotime_sql, 975 exp.TimeToStr: _format_sql, 976 exp.Trim: trim_sql, 977 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 978 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 979 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 980 exp.Uuid: lambda *_: "NEWID()", 981 exp.DateFromParts: rename_func("DATEFROMPARTS"), 982 } 983 984 TRANSFORMS.pop(exp.ReturnsProperty) 985 986 PROPERTIES_LOCATION = { 987 **generator.Generator.PROPERTIES_LOCATION, 988 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 989 } 990 991 def scope_resolution(self, rhs: str, scope_name: str) -> str: 992 return f"{scope_name}::{rhs}" 993 994 def select_sql(self, expression: exp.Select) -> str: 995 limit = expression.args.get("limit") 996 offset = expression.args.get("offset") 997 998 if isinstance(limit, exp.Fetch) and not offset: 999 # Dialects like Oracle can FETCH directly from a row set but 1000 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1001 offset = exp.Offset(expression=exp.Literal.number(0)) 1002 expression.set("offset", offset) 1003 1004 if offset: 1005 if not expression.args.get("order"): 1006 # ORDER BY is required in order to use OFFSET in a query, so we use 1007 # a noop order by, since we don't really care about the order. 1008 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1009 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1010 1011 if isinstance(limit, exp.Limit): 1012 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1013 # we replace here because otherwise TOP would be generated in select_sql 1014 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1015 1016 return super().select_sql(expression) 1017 1018 def convert_sql(self, expression: exp.Convert) -> str: 1019 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1020 return self.func( 1021 name, expression.this, expression.expression, expression.args.get("style") 1022 ) 1023 1024 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1025 option = self.sql(expression, "this") 1026 value = self.sql(expression, "expression") 1027 if value: 1028 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1029 return f"{option} {optional_equal_sign}{value}" 1030 return option 1031 1032 def lateral_op(self, expression: exp.Lateral) -> str: 1033 cross_apply = expression.args.get("cross_apply") 1034 if cross_apply is True: 1035 return "CROSS APPLY" 1036 if cross_apply is False: 1037 return "OUTER APPLY" 1038 1039 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1040 self.unsupported("LATERAL clause is not supported.") 1041 return "LATERAL" 1042 1043 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1044 this = expression.this 1045 split_count = len(this.name.split(".")) 1046 delimiter = expression.args.get("delimiter") 1047 part_index = expression.args.get("part_index") 1048 1049 if ( 1050 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1051 or (delimiter and delimiter.name != ".") 1052 or not part_index 1053 or split_count > 4 1054 ): 1055 self.unsupported( 1056 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1057 ) 1058 return "" 1059 1060 return self.func( 1061 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1062 ) 1063 1064 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1065 nano = expression.args.get("nano") 1066 if nano is not None: 1067 nano.pop() 1068 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1069 1070 if expression.args.get("fractions") is None: 1071 expression.set("fractions", exp.Literal.number(0)) 1072 if expression.args.get("precision") is None: 1073 expression.set("precision", exp.Literal.number(0)) 1074 1075 return rename_func("TIMEFROMPARTS")(self, expression) 1076 1077 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1078 zone = expression.args.get("zone") 1079 if zone is not None: 1080 zone.pop() 1081 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1082 1083 nano = expression.args.get("nano") 1084 if nano is not None: 1085 nano.pop() 1086 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1087 1088 if expression.args.get("milli") is None: 1089 expression.set("milli", exp.Literal.number(0)) 1090 1091 return rename_func("DATETIMEFROMPARTS")(self, expression) 1092 1093 def setitem_sql(self, expression: exp.SetItem) -> str: 1094 this = expression.this 1095 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1096 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1097 return f"{self.sql(this.left)} {self.sql(this.right)}" 1098 1099 return super().setitem_sql(expression) 1100 1101 def boolean_sql(self, expression: exp.Boolean) -> str: 1102 if type(expression.parent) in BIT_TYPES or isinstance( 1103 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1104 ): 1105 return "1" if expression.this else "0" 1106 1107 return "(1 = 1)" if expression.this else "(1 = 0)" 1108 1109 def is_sql(self, expression: exp.Is) -> str: 1110 if isinstance(expression.expression, exp.Boolean): 1111 return self.binary(expression, "=") 1112 return self.binary(expression, "IS") 1113 1114 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1115 sql = self.sql(expression, "this") 1116 properties = expression.args.get("properties") 1117 1118 if sql[:1] != "#" and any( 1119 isinstance(prop, exp.TemporaryProperty) 1120 for prop in (properties.expressions if properties else []) 1121 ): 1122 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1123 1124 return sql 1125 1126 def create_sql(self, expression: exp.Create) -> str: 1127 kind = expression.kind 1128 exists = expression.args.pop("exists", None) 1129 1130 like_property = expression.find(exp.LikeProperty) 1131 if like_property: 1132 ctas_expression = like_property.this 1133 else: 1134 ctas_expression = expression.expression 1135 1136 if kind == "VIEW": 1137 expression.this.set("catalog", None) 1138 with_ = expression.args.get("with") 1139 if ctas_expression and with_: 1140 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1141 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1142 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1143 ctas_expression.set("with", with_.pop()) 1144 1145 sql = super().create_sql(expression) 1146 1147 table = expression.find(exp.Table) 1148 1149 # Convert CTAS statement to SELECT .. INTO .. 1150 if kind == "TABLE" and ctas_expression: 1151 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1152 ctas_expression = ctas_expression.subquery() 1153 1154 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1155 select_into.set("into", exp.Into(this=table)) 1156 1157 if like_property: 1158 select_into.limit(0, copy=False) 1159 1160 sql = self.sql(select_into) 1161 1162 if exists: 1163 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1164 sql_with_ctes = self.prepend_ctes(expression, sql) 1165 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1166 if kind == "SCHEMA": 1167 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1168 elif kind == "TABLE": 1169 assert table 1170 where = exp.and_( 1171 exp.column("table_name").eq(table.name), 1172 exp.column("table_schema").eq(table.db) if table.db else None, 1173 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1174 ) 1175 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1176 elif kind == "INDEX": 1177 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1178 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1179 elif expression.args.get("replace"): 1180 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1181 1182 return self.prepend_ctes(expression, sql) 1183 1184 def count_sql(self, expression: exp.Count) -> str: 1185 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1186 return rename_func(func_name)(self, expression) 1187 1188 def offset_sql(self, expression: exp.Offset) -> str: 1189 return f"{super().offset_sql(expression)} ROWS" 1190 1191 def version_sql(self, expression: exp.Version) -> str: 1192 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1193 this = f"FOR {name}" 1194 expr = expression.expression 1195 kind = expression.text("kind") 1196 if kind in ("FROM", "BETWEEN"): 1197 args = expr.expressions 1198 sep = "TO" if kind == "FROM" else "AND" 1199 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1200 else: 1201 expr_sql = self.sql(expr) 1202 1203 expr_sql = f" {expr_sql}" if expr_sql else "" 1204 return f"{this} {kind}{expr_sql}" 1205 1206 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1207 table = expression.args.get("table") 1208 table = f"{table} " if table else "" 1209 return f"RETURNS {table}{self.sql(expression, 'this')}" 1210 1211 def returning_sql(self, expression: exp.Returning) -> str: 1212 into = self.sql(expression, "into") 1213 into = self.seg(f"INTO {into}") if into else "" 1214 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1215 1216 def transaction_sql(self, expression: exp.Transaction) -> str: 1217 this = self.sql(expression, "this") 1218 this = f" {this}" if this else "" 1219 mark = self.sql(expression, "mark") 1220 mark = f" WITH MARK {mark}" if mark else "" 1221 return f"BEGIN TRANSACTION{this}{mark}" 1222 1223 def commit_sql(self, expression: exp.Commit) -> str: 1224 this = self.sql(expression, "this") 1225 this = f" {this}" if this else "" 1226 durability = expression.args.get("durability") 1227 durability = ( 1228 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1229 if durability is not None 1230 else "" 1231 ) 1232 return f"COMMIT TRANSACTION{this}{durability}" 1233 1234 def rollback_sql(self, expression: exp.Rollback) -> str: 1235 this = self.sql(expression, "this") 1236 this = f" {this}" if this else "" 1237 return f"ROLLBACK TRANSACTION{this}" 1238 1239 def identifier_sql(self, expression: exp.Identifier) -> str: 1240 identifier = super().identifier_sql(expression) 1241 1242 if expression.args.get("global"): 1243 identifier = f"##{identifier}" 1244 elif expression.args.get("temporary"): 1245 identifier = f"#{identifier}" 1246 1247 return identifier 1248 1249 def constraint_sql(self, expression: exp.Constraint) -> str: 1250 this = self.sql(expression, "this") 1251 expressions = self.expressions(expression, flat=True, sep=" ") 1252 return f"CONSTRAINT {this} {expressions}" 1253 1254 def length_sql(self, expression: exp.Length) -> str: 1255 return self._uncast_text(expression, "LEN") 1256 1257 def right_sql(self, expression: exp.Right) -> str: 1258 return self._uncast_text(expression, "RIGHT") 1259 1260 def left_sql(self, expression: exp.Left) -> str: 1261 return self._uncast_text(expression, "LEFT") 1262 1263 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1264 this = expression.this 1265 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1266 this_sql = self.sql(this, "this") 1267 else: 1268 this_sql = self.sql(this) 1269 expression_sql = self.sql(expression, "expression") 1270 return self.func(name, this_sql, expression_sql if expression_sql else None) 1271 1272 def partition_sql(self, expression: exp.Partition) -> str: 1273 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1274 1275 def alter_sql(self, expression: exp.Alter) -> str: 1276 action = seq_get(expression.args.get("actions") or [], 0) 1277 if isinstance(action, exp.AlterRename): 1278 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1279 return super().alter_sql(expression) 1280 1281 def drop_sql(self, expression: exp.Drop) -> str: 1282 if expression.args["kind"] == "VIEW": 1283 expression.this.set("catalog", None) 1284 return super().drop_sql(expression) 1285 1286 def options_modifier(self, expression: exp.Expression) -> str: 1287 options = self.expressions(expression, key="options") 1288 return f" OPTION{self.wrap(options)}" if options else "" 1289 1290 def dpipe_sql(self, expression: exp.DPipe) -> str: 1291 return self.sql( 1292 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1293 ) 1294 1295 def isascii_sql(self, expression: exp.IsAscii) -> str: 1296 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1297 1298 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1299 this = super().columndef_sql(expression, sep) 1300 default = self.sql(expression, "default") 1301 default = f" = {default}" if default else "" 1302 output = self.sql(expression, "output") 1303 output = f" {output}" if output else "" 1304 return f"{this}{default}{output}"
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the
indentation of subqueries and filters under a
WHERE
clause. Default: 2. - normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
994 def select_sql(self, expression: exp.Select) -> str: 995 limit = expression.args.get("limit") 996 offset = expression.args.get("offset") 997 998 if isinstance(limit, exp.Fetch) and not offset: 999 # Dialects like Oracle can FETCH directly from a row set but 1000 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1001 offset = exp.Offset(expression=exp.Literal.number(0)) 1002 expression.set("offset", offset) 1003 1004 if offset: 1005 if not expression.args.get("order"): 1006 # ORDER BY is required in order to use OFFSET in a query, so we use 1007 # a noop order by, since we don't really care about the order. 1008 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1009 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1010 1011 if isinstance(limit, exp.Limit): 1012 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1013 # we replace here because otherwise TOP would be generated in select_sql 1014 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1015 1016 return super().select_sql(expression)
1024 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1025 option = self.sql(expression, "this") 1026 value = self.sql(expression, "expression") 1027 if value: 1028 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1029 return f"{option} {optional_equal_sign}{value}" 1030 return option
1032 def lateral_op(self, expression: exp.Lateral) -> str: 1033 cross_apply = expression.args.get("cross_apply") 1034 if cross_apply is True: 1035 return "CROSS APPLY" 1036 if cross_apply is False: 1037 return "OUTER APPLY" 1038 1039 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1040 self.unsupported("LATERAL clause is not supported.") 1041 return "LATERAL"
1043 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1044 this = expression.this 1045 split_count = len(this.name.split(".")) 1046 delimiter = expression.args.get("delimiter") 1047 part_index = expression.args.get("part_index") 1048 1049 if ( 1050 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1051 or (delimiter and delimiter.name != ".") 1052 or not part_index 1053 or split_count > 4 1054 ): 1055 self.unsupported( 1056 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1057 ) 1058 return "" 1059 1060 return self.func( 1061 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1062 )
1064 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1065 nano = expression.args.get("nano") 1066 if nano is not None: 1067 nano.pop() 1068 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1069 1070 if expression.args.get("fractions") is None: 1071 expression.set("fractions", exp.Literal.number(0)) 1072 if expression.args.get("precision") is None: 1073 expression.set("precision", exp.Literal.number(0)) 1074 1075 return rename_func("TIMEFROMPARTS")(self, expression)
1077 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1078 zone = expression.args.get("zone") 1079 if zone is not None: 1080 zone.pop() 1081 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1082 1083 nano = expression.args.get("nano") 1084 if nano is not None: 1085 nano.pop() 1086 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1087 1088 if expression.args.get("milli") is None: 1089 expression.set("milli", exp.Literal.number(0)) 1090 1091 return rename_func("DATETIMEFROMPARTS")(self, expression)
1093 def setitem_sql(self, expression: exp.SetItem) -> str: 1094 this = expression.this 1095 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1096 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1097 return f"{self.sql(this.left)} {self.sql(this.right)}" 1098 1099 return super().setitem_sql(expression)
1114 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1115 sql = self.sql(expression, "this") 1116 properties = expression.args.get("properties") 1117 1118 if sql[:1] != "#" and any( 1119 isinstance(prop, exp.TemporaryProperty) 1120 for prop in (properties.expressions if properties else []) 1121 ): 1122 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1123 1124 return sql
1126 def create_sql(self, expression: exp.Create) -> str: 1127 kind = expression.kind 1128 exists = expression.args.pop("exists", None) 1129 1130 like_property = expression.find(exp.LikeProperty) 1131 if like_property: 1132 ctas_expression = like_property.this 1133 else: 1134 ctas_expression = expression.expression 1135 1136 if kind == "VIEW": 1137 expression.this.set("catalog", None) 1138 with_ = expression.args.get("with") 1139 if ctas_expression and with_: 1140 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1141 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1142 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1143 ctas_expression.set("with", with_.pop()) 1144 1145 sql = super().create_sql(expression) 1146 1147 table = expression.find(exp.Table) 1148 1149 # Convert CTAS statement to SELECT .. INTO .. 1150 if kind == "TABLE" and ctas_expression: 1151 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1152 ctas_expression = ctas_expression.subquery() 1153 1154 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1155 select_into.set("into", exp.Into(this=table)) 1156 1157 if like_property: 1158 select_into.limit(0, copy=False) 1159 1160 sql = self.sql(select_into) 1161 1162 if exists: 1163 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1164 sql_with_ctes = self.prepend_ctes(expression, sql) 1165 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1166 if kind == "SCHEMA": 1167 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1168 elif kind == "TABLE": 1169 assert table 1170 where = exp.and_( 1171 exp.column("table_name").eq(table.name), 1172 exp.column("table_schema").eq(table.db) if table.db else None, 1173 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1174 ) 1175 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1176 elif kind == "INDEX": 1177 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1178 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1179 elif expression.args.get("replace"): 1180 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1181 1182 return self.prepend_ctes(expression, sql)
1191 def version_sql(self, expression: exp.Version) -> str: 1192 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1193 this = f"FOR {name}" 1194 expr = expression.expression 1195 kind = expression.text("kind") 1196 if kind in ("FROM", "BETWEEN"): 1197 args = expr.expressions 1198 sep = "TO" if kind == "FROM" else "AND" 1199 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1200 else: 1201 expr_sql = self.sql(expr) 1202 1203 expr_sql = f" {expr_sql}" if expr_sql else "" 1204 return f"{this} {kind}{expr_sql}"
1223 def commit_sql(self, expression: exp.Commit) -> str: 1224 this = self.sql(expression, "this") 1225 this = f" {this}" if this else "" 1226 durability = expression.args.get("durability") 1227 durability = ( 1228 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1229 if durability is not None 1230 else "" 1231 ) 1232 return f"COMMIT TRANSACTION{this}{durability}"
1239 def identifier_sql(self, expression: exp.Identifier) -> str: 1240 identifier = super().identifier_sql(expression) 1241 1242 if expression.args.get("global"): 1243 identifier = f"##{identifier}" 1244 elif expression.args.get("temporary"): 1245 identifier = f"#{identifier}" 1246 1247 return identifier
1298 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1299 this = super().columndef_sql(expression, sep) 1300 default = self.sql(expression, "default") 1301 default = f" = {default}" if default else "" 1302 output = self.sql(expression, "output") 1303 output = f" {output}" if output else "" 1304 return f"{this}{default}{output}"
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- SUPPORTS_MEDIAN
- SUPPORTS_UNIX_SECONDS
- ARRAY_SIZE_NAME
- ALTER_SET_TYPE
- ARRAY_SIZE_DIM_REQUIRED
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- columnposition_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- transformcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- limitoptions_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablefromrows_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- groupingsets_sql
- rollup_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterindex_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- safedivide_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- jsoncast_sql
- try_sql
- log_sql
- use_sql
- binary
- ceil_floor
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- whens_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- uniquekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodatetime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- arrayconcat_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonextractquote_sql
- jsonexists_sql
- arrayagg_sql
- apply_sql
- grant_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql
- string_sql
- median_sql
- overflowtruncatebehavior_sql
- unixseconds_sql
- arraysize_sql
- attach_sql
- detach_sql
- attachoption_sql
- featuresattime_sql
- watermarkcolumnconstraint_sql
- encodeproperty_sql
- includeproperty_sql
- xmlelement_sql
- partitionbyrangeproperty_sql
- partitionbyrangepropertydynamic_sql
- unpivotcolumns_sql
- analyzesample_sql
- analyzestatistics_sql
- analyzehistogram_sql
- analyzedelete_sql
- analyzelistchainedrows_sql
- analyzevalidate_sql
- analyze_sql
- xmltable_sql
- xmlnamespace_sql
- export_sql
- declare_sql
- declareitem_sql
- recursivewithsearch_sql
- parameterizedagg_sql
- anonymousaggfunc_sql
- combinedaggfunc_sql
- combinedparameterizedagg_sql
- show_sql
- put_sql