# sqlglot.dialects.tsql — T-SQL (Microsoft SQL Server / Azure SQL) dialect
from __future__ import annotations

import datetime
import re
import typing as t
from functools import partial, reduce

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    NormalizationStrategy,
    any_value_to_max_sql,
    date_delta_sql,
    datestrtodate_sql,
    generatedasidentitycolumnconstraint_sql,
    max_or_greatest,
    min_or_least,
    build_date_delta,
    rename_func,
    trim_sql,
    timestrtotime_sql,
)
from sqlglot.helper import seq_get
from sqlglot.parser import build_coalesce
from sqlglot.time import format_time
from sqlglot.tokens import TokenType

if t.TYPE_CHECKING:
    from sqlglot._typing import E

# Extra mappings used by DATENAME, which spells out full part names
# (e.g. "April", "Monday") instead of the numeric forms in TIME_MAPPING.
FULL_FORMAT_TIME_MAPPING = {
    "weekday": "%A",
    "dw": "%A",
    "w": "%A",
    "month": "%B",
    "mm": "%B",
    "m": "%B",
}

# Canonical unit names for the T-SQL datepart abbreviations accepted by
# DATEADD/DATEDIFF (e.g. "yy" and "yyyy" both mean "year").
DATE_DELTA_INTERVAL = {
    "year": "year",
    "yyyy": "year",
    "yy": "year",
    "quarter": "quarter",
    "qq": "quarter",
    "q": "quarter",
    "month": "month",
    "mm": "month",
    "m": "month",
    "week": "week",
    "ww": "week",
    "wk": "week",
    "day": "day",
    "dd": "day",
    "d": "day",
}


# Matches date/time placeholder runs (d, m, y, h, s) inside a FORMAT() format
# string; used to decide whether a format is numeric or date-like.
DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})")

# N = Numeric, C = Currency: FORMAT() specifiers that are safe to transpile
# as number formats even though they contain letters matched by DATE_FMT_RE.
TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"}

# T-SQL's zero point for integer DATETIME values (day 0 = 1900-01-01).
DEFAULT_START_DATE = datetime.date(1900, 1, 1)

# Parent node types under which a boolean must be rendered as a BIT (1/0)
# rather than as a comparison like (1 = 1); see Generator.boolean_sql.
BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias}

# Unsupported options:
# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] )
# - TABLE HINT
OPTIONS: parser.OPTIONS_TYPE = {
    **dict.fromkeys(
        (
            "DISABLE_OPTIMIZED_PLAN_FORCING",
            "FAST",
            "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX",
            "LABEL",
            "MAXDOP",
            "MAXRECURSION",
            "MAX_GRANT_PERCENT",
            "MIN_GRANT_PERCENT",
            "NO_PERFORMANCE_SPOOL",
            "QUERYTRACEON",
            "RECOMPILE",
        ),
        tuple(),
    ),
    "CONCAT": ("UNION",),
    "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"),
    "EXPAND": ("VIEWS",),
    "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"),
    "HASH": ("GROUP", "JOIN", "UNION"),
    "KEEP": ("PLAN",),
    "KEEPFIXED": ("PLAN",),
    "LOOP": ("JOIN",),
    "MERGE": ("JOIN", "UNION"),
    "OPTIMIZE": (("FOR", "UNKNOWN"),),
    "ORDER": ("GROUP",),
    "PARAMETERIZATION": ("FORCED", "SIMPLE"),
    "ROBUST": ("PLAN",),
    "USE": ("PLAN",),
}

# Query options whose value must be rendered with an '=' sign.
OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL")


def _build_formatted_time(
    exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None
) -> t.Callable[[t.List], E]:
    """Return a builder for DATENAME/DATEPART-style functions.

    The produced builder expects exactly two args (datepart, value), casts the
    value to DATETIME2 and converts the T-SQL datepart into a strftime-style
    format string. When ``full_format_mapping`` is set, full-name parts
    (e.g. month/weekday names) take precedence, as DATENAME requires.
    """

    def _builder(args: t.List) -> E:
        assert len(args) == 2

        return exp_class(
            this=exp.cast(args[1], exp.DataType.Type.DATETIME2),
            format=exp.Literal.string(
                format_time(
                    args[0].name.lower(),
                    (
                        {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING}
                        if full_format_mapping
                        else TSQL.TIME_MAPPING
                    ),
                )
            ),
        )

    return _builder


def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr:
    """Build FORMAT(value, fmt[, culture]) into NumberToStr or TimeToStr.

    A format counts as numeric if it is a transpile-safe number specifier or
    contains no date/time placeholders; otherwise it is treated as a time
    format (single-char formats use the standard-format table).
    """
    this = seq_get(args, 0)
    fmt = seq_get(args, 1)
    culture = seq_get(args, 2)

    number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name))

    if number_fmt:
        return exp.NumberToStr(this=this, format=fmt, culture=culture)

    if fmt:
        fmt = exp.Literal.string(
            format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING)
            if len(fmt.name) == 1
            else format_time(fmt.name, TSQL.TIME_MAPPING)
        )

    return exp.TimeToStr(this=this, format=fmt, culture=culture)


def _build_eomonth(args: t.List) -> exp.LastDay:
    """Build EOMONTH(date[, month_lag]) into LastDay(DateAdd(date, lag))."""
    date = exp.TsOrDsToDate(this=seq_get(args, 0))
    month_lag = seq_get(args, 1)

    if month_lag is None:
        this: exp.Expression = date
    else:
        unit = DATE_DELTA_INTERVAL.get("month")
        this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit))

    return exp.LastDay(this=this)


def _build_hashbytes(args: t.List) -> exp.Expression:
    """Build HASHBYTES('<algorithm>', data) into the dedicated hash node.

    Unrecognized algorithms fall back to an anonymous HASHBYTES call so the
    original SQL round-trips unchanged.
    """
    kind, data = args
    kind = kind.name.upper() if kind.is_string else ""

    if kind == "MD5":
        args.pop(0)
        return exp.MD5(this=data)
    if kind in ("SHA", "SHA1"):
        args.pop(0)
        return exp.SHA(this=data)
    if kind == "SHA2_256":
        return exp.SHA2(this=data, length=exp.Literal.number(256))
    if kind == "SHA2_512":
        return exp.SHA2(this=data, length=exp.Literal.number(512))

    return exp.func("HASHBYTES", *args)


# Dateparts that must be generated through DATEPART (FORMAT has no
# equivalent single specifier for them).
DATEPART_ONLY_FORMATS = {"DW", "WK", "HOUR", "QUARTER"}


def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str:
    """Generate FORMAT(...) SQL, routing some time formats to DATEPART."""
    fmt = expression.args["format"]

    if not isinstance(expression, exp.NumberToStr):
        if fmt.is_string:
            mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING)

            name = (mapped_fmt or "").upper()
            if name in DATEPART_ONLY_FORMATS:
                return self.func("DATEPART", name, expression.this)

            fmt_sql = self.sql(exp.Literal.string(mapped_fmt))
        else:
            fmt_sql = self.format_time(expression) or self.sql(fmt)
    else:
        fmt_sql = self.sql(fmt)

    return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture"))


def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str:
    """Generate GROUP_CONCAT as STRING_AGG, dropping unsupported DISTINCT
    and moving any ORDER BY into a WITHIN GROUP clause."""
    this = expression.this
    distinct = expression.find(exp.Distinct)
    if distinct:
        # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression
        self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.")
        this = distinct.pop().expressions[0]

    order = ""
    if isinstance(expression.this, exp.Order):
        if expression.this.this:
            this = expression.this.this.pop()
        # Order has a leading space
        order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})"

    separator = expression.args.get("separator") or exp.Literal.string(",")
    return f"STRING_AGG({self.format_args(this, separator)}){order}"
def _build_date_delta(
    exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None
) -> t.Callable[[t.List], E]:
    """Return a builder for DATEDIFF-style functions (unit, start, end).

    Integer start dates are interpreted as day offsets from the T-SQL epoch
    (1900-01-01) and rewritten as date strings; float start dates are left
    untouched (see inline note).
    """

    def _builder(args: t.List) -> E:
        unit = seq_get(args, 0)
        if unit and unit_mapping:
            unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name))

        start_date = seq_get(args, 1)
        if start_date and start_date.is_number:
            # Numeric types are valid DATETIME values
            if start_date.is_int:
                adds = DEFAULT_START_DATE + datetime.timedelta(days=start_date.to_py())
                start_date = exp.Literal.string(adds.strftime("%F"))
            else:
                # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs.
                # This is not a problem when generating T-SQL code, it is when transpiling to other dialects.
                return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit)

        return exp_class(
            this=exp.TimeStrToTime(this=seq_get(args, 2)),
            expression=exp.TimeStrToTime(this=start_date),
            unit=unit,
        )

    return _builder


def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression


# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts:
    """Map DATETIMEFROMPARTS' positional args onto TimestampFromParts."""
    return exp.TimestampFromParts(
        year=seq_get(args, 0),
        month=seq_get(args, 1),
        day=seq_get(args, 2),
        hour=seq_get(args, 3),
        min=seq_get(args, 4),
        sec=seq_get(args, 5),
        milli=seq_get(args, 6),
    )


# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_timefromparts(args: t.List) -> exp.TimeFromParts:
    """Map TIMEFROMPARTS' positional args onto TimeFromParts."""
    return exp.TimeFromParts(
        hour=seq_get(args, 0),
        min=seq_get(args, 1),
        sec=seq_get(args, 2),
        fractions=seq_get(args, 3),
        precision=seq_get(args, 4),
    )


def _build_with_arg_as_text(
    klass: t.Type[exp.Expression],
) -> t.Callable[[t.List[exp.Expression]], exp.Expression]:
    """Return a builder that casts the first argument to TEXT when it is not
    already a string literal (used for LEN/LEFT/RIGHT)."""

    def _parse(args: t.List[exp.Expression]) -> exp.Expression:
        this = seq_get(args, 0)

        if this and not this.is_string:
            this = exp.cast(this, exp.DataType.Type.TEXT)

        expression = seq_get(args, 1)
        kwargs = {"this": this}

        if expression:
            kwargs["expression"] = expression

        return klass(**kwargs)

    return _parse


# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16
def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous:
    # PARSENAME(...) will be stored into exp.SplitPart if:
    # - All args are literals
    # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL)
    if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args):
        this = args[0]
        part_index = args[1]
        split_count = len(this.name.split("."))
        if split_count <= 4:
            # PARSENAME counts parts right-to-left; SplitPart counts
            # left-to-right, hence the index inversion.
            return exp.SplitPart(
                this=this,
                delimiter=exp.Literal.string("."),
                part_index=exp.Literal.number(split_count + 1 - part_index.to_py()),
            )

    return exp.Anonymous(this="PARSENAME", expressions=args)


def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract:
    if len(args) == 1:
        # The default value for path is '$'. As a result, if you don't provide a
        # value for path, JSON_QUERY returns the input expression.
        args.append(exp.Literal.string("$"))

    return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect)


def _json_extract_sql(
    self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar
) -> str:
    """Generate JSON extraction as ISNULL(JSON_QUERY(...), JSON_VALUE(...)),
    since JSON_QUERY handles objects/arrays and JSON_VALUE handles scalars."""
    json_query = self.func("JSON_QUERY", expression.this, expression.expression)
    json_value = self.func("JSON_VALUE", expression.this, expression.expression)
    return self.func("ISNULL", json_query, json_value)


def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime) -> str:
    sql = timestrtotime_sql(self, expression)
    if expression.args.get("zone"):
        # If there is a timezone, produce an expression like:
        # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC'
        # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information
        return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC")))
    return sql


def _build_datetrunc(args: t.List) -> exp.TimestampTrunc:
    """Build DATETRUNC(unit, value), casting string values to DATETIME2."""
    unit = seq_get(args, 0)
    this = seq_get(args, 1)

    if this and this.is_string:
        this = exp.cast(this, exp.DataType.Type.DATETIME2)

    return exp.TimestampTrunc(this=this, unit=unit)


class TSQL(Dialect):
    """T-SQL (Microsoft SQL Server / Azure SQL Database) dialect."""

    SUPPORTS_SEMI_ANTI_JOIN = False
    LOG_BASE_FIRST = False
    TYPED_DIVISION = True
    CONCAT_COALESCE = True
    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE

    TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"

    # T-SQL datepart / format-token -> strftime-style specifier.
    TIME_MAPPING = {
        "year": "%Y",
        "dayofyear": "%j",
        "day": "%d",
        "dy": "%d",
        "y": "%Y",
        "week": "%W",
        "ww": "%W",
        "wk": "%W",
        "hour": "%h",
        "hh": "%I",
        "minute": "%M",
        "mi": "%M",
        "n": "%M",
        "second": "%S",
        "ss": "%S",
        "s": "%-S",
        "millisecond": "%f",
        "ms": "%f",
        "weekday": "%w",
        "dw": "%w",
        "month": "%m",
        "mm": "%M",
        "m": "%-M",
        "Y": "%Y",
        "YYYY": "%Y",
        "YY": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dddd": "%A",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "h": "%-I",
        "ffffff": "%f",
        "yyyy": "%Y",
        "yy": "%y",
    }

    # CONVERT() numeric style codes -> strftime-style format strings.
    CONVERT_FORMAT_MAPPING = {
        "0": "%b %d %Y %-I:%M%p",
        "1": "%m/%d/%y",
        "2": "%y.%m.%d",
        "3": "%d/%m/%y",
        "4": "%d.%m.%y",
        "5": "%d-%m-%y",
        "6": "%d %b %y",
        "7": "%b %d, %y",
        "8": "%H:%M:%S",
        "9": "%b %d %Y %-I:%M:%S:%f%p",
        "10": "mm-dd-yy",
        "11": "yy/mm/dd",
        "12": "yymmdd",
        "13": "%d %b %Y %H:%M:ss:%f",
        "14": "%H:%M:%S:%f",
        "20": "%Y-%m-%d %H:%M:%S",
        "21": "%Y-%m-%d %H:%M:%S.%f",
        "22": "%m/%d/%y %-I:%M:%S %p",
        "23": "%Y-%m-%d",
        "24": "%H:%M:%S",
        "25": "%Y-%m-%d %H:%M:%S.%f",
        "100": "%b %d %Y %-I:%M%p",
        "101": "%m/%d/%Y",
        "102": "%Y.%m.%d",
        "103": "%d/%m/%Y",
        "104": "%d.%m.%Y",
        "105": "%d-%m-%Y",
        "106": "%d %b %Y",
        "107": "%b %d, %Y",
        "108": "%H:%M:%S",
        "109": "%b %d %Y %-I:%M:%S:%f%p",
        "110": "%m-%d-%Y",
        "111": "%Y/%m/%d",
        "112": "%Y%m%d",
        "113": "%d %b %Y %H:%M:%S:%f",
"114": "%H:%M:%S:%f", 471 "120": "%Y-%m-%d %H:%M:%S", 472 "121": "%Y-%m-%d %H:%M:%S.%f", 473 } 474 475 FORMAT_TIME_MAPPING = { 476 "y": "%B %Y", 477 "d": "%m/%d/%Y", 478 "H": "%-H", 479 "h": "%-I", 480 "s": "%Y-%m-%d %H:%M:%S", 481 "D": "%A,%B,%Y", 482 "f": "%A,%B,%Y %-I:%M %p", 483 "F": "%A,%B,%Y %-I:%M:%S %p", 484 "g": "%m/%d/%Y %-I:%M %p", 485 "G": "%m/%d/%Y %-I:%M:%S %p", 486 "M": "%B %-d", 487 "m": "%B %-d", 488 "O": "%Y-%m-%dT%H:%M:%S", 489 "u": "%Y-%M-%D %H:%M:%S%z", 490 "U": "%A, %B %D, %Y %H:%M:%S%z", 491 "T": "%-I:%M:%S %p", 492 "t": "%-I:%M", 493 "Y": "%a %Y", 494 } 495 496 class Tokenizer(tokens.Tokenizer): 497 IDENTIFIERS = [("[", "]"), '"'] 498 QUOTES = ["'", '"'] 499 HEX_STRINGS = [("0x", ""), ("0X", "")] 500 VAR_SINGLE_TOKENS = {"@", "$", "#"} 501 502 KEYWORDS = { 503 **tokens.Tokenizer.KEYWORDS, 504 "CLUSTERED INDEX": TokenType.INDEX, 505 "DATETIME2": TokenType.DATETIME2, 506 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 507 "DECLARE": TokenType.DECLARE, 508 "EXEC": TokenType.COMMAND, 509 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 510 "IMAGE": TokenType.IMAGE, 511 "MONEY": TokenType.MONEY, 512 "NONCLUSTERED INDEX": TokenType.INDEX, 513 "NTEXT": TokenType.TEXT, 514 "OPTION": TokenType.OPTION, 515 "OUTPUT": TokenType.RETURNING, 516 "PRINT": TokenType.COMMAND, 517 "PROC": TokenType.PROCEDURE, 518 "REAL": TokenType.FLOAT, 519 "ROWVERSION": TokenType.ROWVERSION, 520 "SMALLDATETIME": TokenType.SMALLDATETIME, 521 "SMALLMONEY": TokenType.SMALLMONEY, 522 "SQL_VARIANT": TokenType.VARIANT, 523 "SYSTEM_USER": TokenType.CURRENT_USER, 524 "TOP": TokenType.TOP, 525 "TIMESTAMP": TokenType.ROWVERSION, 526 "TINYINT": TokenType.UTINYINT, 527 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 528 "UPDATE STATISTICS": TokenType.COMMAND, 529 "XML": TokenType.XML, 530 } 531 KEYWORDS.pop("/*+") 532 533 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 534 535 class Parser(parser.Parser): 536 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 537 LOG_DEFAULTS_TO_LN = True 538 
ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 539 STRING_ALIASES = True 540 NO_PAREN_IF_COMMANDS = False 541 542 QUERY_MODIFIER_PARSERS = { 543 **parser.Parser.QUERY_MODIFIER_PARSERS, 544 TokenType.OPTION: lambda self: ("options", self._parse_options()), 545 } 546 547 FUNCTIONS = { 548 **parser.Parser.FUNCTIONS, 549 "CHARINDEX": lambda args: exp.StrPosition( 550 this=seq_get(args, 1), 551 substr=seq_get(args, 0), 552 position=seq_get(args, 2), 553 ), 554 "COUNT": lambda args: exp.Count( 555 this=seq_get(args, 0), expressions=args[1:], big_int=False 556 ), 557 "COUNT_BIG": lambda args: exp.Count( 558 this=seq_get(args, 0), expressions=args[1:], big_int=True 559 ), 560 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 561 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 562 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 563 "DATEPART": _build_formatted_time(exp.TimeToStr), 564 "DATETIMEFROMPARTS": _build_datetimefromparts, 565 "EOMONTH": _build_eomonth, 566 "FORMAT": _build_format, 567 "GETDATE": exp.CurrentTimestamp.from_arg_list, 568 "HASHBYTES": _build_hashbytes, 569 "ISNULL": build_coalesce, 570 "JSON_QUERY": _build_json_query, 571 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 572 "LEN": _build_with_arg_as_text(exp.Length), 573 "LEFT": _build_with_arg_as_text(exp.Left), 574 "RIGHT": _build_with_arg_as_text(exp.Right), 575 "PARSENAME": _build_parsename, 576 "REPLICATE": exp.Repeat.from_arg_list, 577 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 578 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 579 "SUSER_NAME": exp.CurrentUser.from_arg_list, 580 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 581 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 582 "TIMEFROMPARTS": _build_timefromparts, 583 "DATETRUNC": _build_datetrunc, 584 } 585 586 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 587 588 PROCEDURE_OPTIONS 
= dict.fromkeys( 589 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 590 ) 591 592 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 593 TokenType.TABLE, 594 *parser.Parser.TYPE_TOKENS, 595 } 596 597 STATEMENT_PARSERS = { 598 **parser.Parser.STATEMENT_PARSERS, 599 TokenType.DECLARE: lambda self: self._parse_declare(), 600 } 601 602 RANGE_PARSERS = { 603 **parser.Parser.RANGE_PARSERS, 604 TokenType.DCOLON: lambda self, this: self.expression( 605 exp.ScopeResolution, 606 this=this, 607 expression=self._parse_function() or self._parse_var(any_token=True), 608 ), 609 } 610 611 NO_PAREN_FUNCTION_PARSERS = { 612 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 613 "NEXT": lambda self: self._parse_next_value_for(), 614 } 615 616 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 617 COLUMN_OPERATORS = { 618 **parser.Parser.COLUMN_OPERATORS, 619 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 620 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 621 else self.expression(exp.ScopeResolution, this=this, expression=to), 622 } 623 624 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 625 # We want to use _parse_types() if the first token after :: is a known type, 626 # otherwise we could parse something like x::varchar(max) into a function 627 if self._match_set(self.TYPE_TOKENS, advance=False): 628 return self._parse_types() 629 630 return self._parse_function() or self._parse_types() 631 632 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 633 if not self._match(TokenType.OPTION): 634 return None 635 636 def _parse_option() -> t.Optional[exp.Expression]: 637 option = self._parse_var_from_options(OPTIONS) 638 if not option: 639 return None 640 641 self._match(TokenType.EQ) 642 return self.expression( 643 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 644 ) 645 646 return 
self._parse_wrapped_csv(_parse_option) 647 648 def _parse_projections(self) -> t.List[exp.Expression]: 649 """ 650 T-SQL supports the syntax alias = expression in the SELECT's projection list, 651 so we transform all parsed Selects to convert their EQ projections into Aliases. 652 653 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 654 """ 655 return [ 656 ( 657 exp.alias_(projection.expression, projection.this.this, copy=False) 658 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 659 else projection 660 ) 661 for projection in super()._parse_projections() 662 ] 663 664 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 665 """Applies to SQL Server and Azure SQL Database 666 COMMIT [ { TRAN | TRANSACTION } 667 [ transaction_name | @tran_name_variable ] ] 668 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 669 670 ROLLBACK { TRAN | TRANSACTION } 671 [ transaction_name | @tran_name_variable 672 | savepoint_name | @savepoint_variable ] 673 """ 674 rollback = self._prev.token_type == TokenType.ROLLBACK 675 676 self._match_texts(("TRAN", "TRANSACTION")) 677 this = self._parse_id_var() 678 679 if rollback: 680 return self.expression(exp.Rollback, this=this) 681 682 durability = None 683 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 684 self._match_text_seq("DELAYED_DURABILITY") 685 self._match(TokenType.EQ) 686 687 if self._match_text_seq("OFF"): 688 durability = False 689 else: 690 self._match(TokenType.ON) 691 durability = True 692 693 self._match_r_paren() 694 695 return self.expression(exp.Commit, this=this, durability=durability) 696 697 def _parse_transaction(self) -> exp.Transaction | exp.Command: 698 """Applies to SQL Server and Azure SQL Database 699 BEGIN { TRAN | TRANSACTION } 700 [ { transaction_name | @tran_name_variable } 701 [ WITH MARK [ 'description' ] ] 702 ] 703 """ 704 if self._match_texts(("TRAN", "TRANSACTION")): 705 transaction = 
self.expression(exp.Transaction, this=self._parse_id_var()) 706 if self._match_text_seq("WITH", "MARK"): 707 transaction.set("mark", self._parse_string()) 708 709 return transaction 710 711 return self._parse_as_command(self._prev) 712 713 def _parse_returns(self) -> exp.ReturnsProperty: 714 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 715 returns = super()._parse_returns() 716 returns.set("table", table) 717 return returns 718 719 def _parse_convert( 720 self, strict: bool, safe: t.Optional[bool] = None 721 ) -> t.Optional[exp.Expression]: 722 this = self._parse_types() 723 self._match(TokenType.COMMA) 724 args = [this, *self._parse_csv(self._parse_assignment)] 725 convert = exp.Convert.from_arg_list(args) 726 convert.set("safe", safe) 727 convert.set("strict", strict) 728 return convert 729 730 def _parse_user_defined_function( 731 self, kind: t.Optional[TokenType] = None 732 ) -> t.Optional[exp.Expression]: 733 this = super()._parse_user_defined_function(kind=kind) 734 735 if ( 736 kind == TokenType.FUNCTION 737 or isinstance(this, exp.UserDefinedFunction) 738 or self._match(TokenType.ALIAS, advance=False) 739 ): 740 return this 741 742 if not self._match(TokenType.WITH, advance=False): 743 expressions = self._parse_csv(self._parse_function_parameter) 744 else: 745 expressions = None 746 747 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 748 749 def _parse_id_var( 750 self, 751 any_token: bool = True, 752 tokens: t.Optional[t.Collection[TokenType]] = None, 753 ) -> t.Optional[exp.Expression]: 754 is_temporary = self._match(TokenType.HASH) 755 is_global = is_temporary and self._match(TokenType.HASH) 756 757 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 758 if this: 759 if is_global: 760 this.set("global", True) 761 elif is_temporary: 762 this.set("temporary", True) 763 764 return this 765 766 def _parse_create(self) -> exp.Create | exp.Command: 767 create = 
super()._parse_create() 768 769 if isinstance(create, exp.Create): 770 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 771 if isinstance(table, exp.Table) and table.this.args.get("temporary"): 772 if not create.args.get("properties"): 773 create.set("properties", exp.Properties(expressions=[])) 774 775 create.args["properties"].append("expressions", exp.TemporaryProperty()) 776 777 return create 778 779 def _parse_if(self) -> t.Optional[exp.Expression]: 780 index = self._index 781 782 if self._match_text_seq("OBJECT_ID"): 783 self._parse_wrapped_csv(self._parse_string) 784 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 785 return self._parse_drop(exists=True) 786 self._retreat(index) 787 788 return super()._parse_if() 789 790 def _parse_unique(self) -> exp.UniqueColumnConstraint: 791 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 792 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 793 else: 794 this = self._parse_schema(self._parse_id_var(any_token=False)) 795 796 return self.expression(exp.UniqueColumnConstraint, this=this) 797 798 def _parse_partition(self) -> t.Optional[exp.Partition]: 799 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 800 return None 801 802 def parse_range(): 803 low = self._parse_bitwise() 804 high = self._parse_bitwise() if self._match_text_seq("TO") else None 805 806 return ( 807 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 808 ) 809 810 partition = self.expression( 811 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 812 ) 813 814 self._match_r_paren() 815 816 return partition 817 818 def _parse_declare(self) -> exp.Declare | exp.Command: 819 index = self._index 820 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 821 822 if not expressions or self._curr: 823 self._retreat(index) 824 return self._parse_as_command(self._prev) 825 826 return self.expression(exp.Declare, 
expressions=expressions) 827 828 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 829 var = self._parse_id_var() 830 if not var: 831 return None 832 833 value = None 834 self._match(TokenType.ALIAS) 835 if self._match(TokenType.TABLE): 836 data_type = self._parse_schema() 837 else: 838 data_type = self._parse_types() 839 if self._match(TokenType.EQ): 840 value = self._parse_bitwise() 841 842 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 843 844 class Generator(generator.Generator): 845 LIMIT_IS_TOP = True 846 QUERY_HINTS = False 847 RETURNING_END = False 848 NVL2_SUPPORTED = False 849 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 850 LIMIT_FETCH = "FETCH" 851 COMPUTED_COLUMN_WITH_TYPE = False 852 CTE_RECURSIVE_KEYWORD_REQUIRED = False 853 ENSURE_BOOLS = True 854 NULL_ORDERING_SUPPORTED = None 855 SUPPORTS_SINGLE_ARG_CONCAT = False 856 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 857 SUPPORTS_SELECT_INTO = True 858 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 859 SUPPORTS_TO_NUMBER = False 860 SET_OP_MODIFIERS = False 861 COPY_PARAMS_EQ_REQUIRED = True 862 PARSE_JSON_NAME = None 863 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 864 865 EXPRESSIONS_WITHOUT_NESTED_CTES = { 866 exp.Create, 867 exp.Delete, 868 exp.Insert, 869 exp.Intersect, 870 exp.Except, 871 exp.Merge, 872 exp.Select, 873 exp.Subquery, 874 exp.Union, 875 exp.Update, 876 } 877 878 SUPPORTED_JSON_PATH_PARTS = { 879 exp.JSONPathKey, 880 exp.JSONPathRoot, 881 exp.JSONPathSubscript, 882 } 883 884 TYPE_MAPPING = { 885 **generator.Generator.TYPE_MAPPING, 886 exp.DataType.Type.BOOLEAN: "BIT", 887 exp.DataType.Type.DATETIME2: "DATETIME2", 888 exp.DataType.Type.DECIMAL: "NUMERIC", 889 exp.DataType.Type.DOUBLE: "FLOAT", 890 exp.DataType.Type.INT: "INTEGER", 891 exp.DataType.Type.ROWVERSION: "ROWVERSION", 892 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 893 exp.DataType.Type.TIMESTAMP: "DATETIME2", 894 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 895 exp.DataType.Type.TIMESTAMPTZ: 
"DATETIMEOFFSET", 896 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 897 exp.DataType.Type.UTINYINT: "TINYINT", 898 exp.DataType.Type.VARIANT: "SQL_VARIANT", 899 } 900 901 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 902 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 903 904 TRANSFORMS = { 905 **generator.Generator.TRANSFORMS, 906 exp.AnyValue: any_value_to_max_sql, 907 exp.ArrayToString: rename_func("STRING_AGG"), 908 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 909 exp.Chr: rename_func("CHAR"), 910 exp.DateAdd: date_delta_sql("DATEADD"), 911 exp.DateDiff: date_delta_sql("DATEDIFF"), 912 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 913 exp.CurrentDate: rename_func("GETDATE"), 914 exp.CurrentTimestamp: rename_func("GETDATE"), 915 exp.DateStrToDate: datestrtodate_sql, 916 exp.Extract: rename_func("DATEPART"), 917 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 918 exp.GroupConcat: _string_agg_sql, 919 exp.If: rename_func("IIF"), 920 exp.JSONExtract: _json_extract_sql, 921 exp.JSONExtractScalar: _json_extract_sql, 922 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 923 exp.Ln: rename_func("LOG"), 924 exp.Max: max_or_greatest, 925 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 926 exp.Min: min_or_least, 927 exp.NumberToStr: _format_sql, 928 exp.Repeat: rename_func("REPLICATE"), 929 exp.Select: transforms.preprocess( 930 [ 931 transforms.eliminate_distinct_on, 932 transforms.eliminate_semi_and_anti_joins, 933 transforms.eliminate_qualify, 934 transforms.unnest_generate_date_array_using_recursive_cte, 935 ] 936 ), 937 exp.Stddev: rename_func("STDEV"), 938 exp.StrPosition: lambda self, e: self.func( 939 "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position") 940 ), 941 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 942 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 943 exp.SHA2: lambda 
self, e: self.func(
                "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
            ),
            exp.TemporaryProperty: lambda self, e: "",
            exp.TimeStrToTime: _timestrtotime_sql,
            exp.TimeToStr: _format_sql,
            exp.Trim: trim_sql,
            exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
            exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
            exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
            exp.DateFromParts: rename_func("DATEFROMPARTS"),
        }

        # T-SQL declares a function's return type inline (see returnsproperty_sql below),
        # so the generic transform is removed here.
        TRANSFORMS.pop(exp.ReturnsProperty)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            # T-SQL has no VOLATILE table property; flag it as unsupported.
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def scope_resolution(self, rhs: str, scope_name: str) -> str:
            """Render scope resolution using T-SQL's `scope::member` syntax."""
            return f"{scope_name}::{rhs}"

        def select_sql(self, expression: exp.Select) -> str:
            """Generate a SELECT, adapting OFFSET/LIMIT to T-SQL's ORDER BY ... OFFSET/FETCH."""
            if expression.args.get("offset"):
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

                limit = expression.args.get("limit")
                if isinstance(limit, exp.Limit):
                    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                    # we replace here because otherwise TOP would be generated in select_sql
                    limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            """Generate CONVERT, or TRY_CONVERT when the expression is marked safe."""
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            """Render a single OPTION(...) hint; some options take `NAME = value` form."""
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            """Map LATERAL joins to T-SQL's CROSS APPLY / OUTER APPLY operators."""
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"

        def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
            """Transpile SPLIT_PART to PARSENAME.

            PARSENAME only splits on '.' and indexes parts from the RIGHT (max 4 parts),
            hence the restrictions below and the `split_count + 1 - part_index` flip.
            """
            this = expression.this
            split_count = len(this.name.split("."))
            delimiter = expression.args.get("delimiter")
            part_index = expression.args.get("part_index")

            if (
                not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
                or (delimiter and delimiter.name != ".")
                or not part_index
                or split_count > 4
            ):
                self.unsupported(
                    "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
                )
                return ""

            return self.func(
                "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
            )

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            """Generate TIMEFROMPARTS; nanoseconds are dropped, missing args default to 0."""
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            # TIMEFROMPARTS requires all positional arguments, so fill the gaps with zeros.
            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)

        def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
            """Generate DATETIMEFROMPARTS; time zone and nanoseconds are unsupported and dropped."""
            zone = expression.args.get("zone")
            if zone is not None:
                zone.pop()
                self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

            # DATETIMEFROMPARTS requires the milliseconds argument, so default it to 0.
            if expression.args.get("milli") is None:
                expression.set("milli", exp.Literal.number(0))

            return rename_func("DATETIMEFROMPARTS")(self, expression)

        def setitem_sql(self, expression: exp.SetItem) -> str:
            this = expression.this
            if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
                # T-SQL does not use '=' in SET command, except when the LHS is a variable.
                return f"{self.sql(this.left)} {self.sql(this.right)}"

            return super().setitem_sql(expression)

        def boolean_sql(self, expression: exp.Boolean) -> str:
            """Render booleans as BIT literals (1/0) where T-SQL expects values, and as
            tautological comparisons ((1 = 1)/(1 = 0)) where it expects predicates."""
            if type(expression.parent) in BIT_TYPES or isinstance(
                expression.find_ancestor(exp.Values, exp.Select), exp.Values
            ):
                return "1" if expression.this else "0"

            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            """T-SQL has no `IS TRUE/FALSE`, so compare against the BIT value with `=`."""
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            """Prefix temporary table names with '#' (inside the bracket if quoted)."""
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            """Generate CREATE, handling several T-SQL specifics:

            - CTAS becomes `SELECT * INTO target FROM (...)` (with LIMIT 0 for LIKE),
            - IF NOT EXISTS is emulated via information_schema/sys checks + EXEC,
            - CREATE OR REPLACE becomes CREATE OR ALTER,
            - views get their catalog stripped and CTEs moved into the view's query.
            """
            kind = expression.kind
            exists = expression.args.pop("exists", None)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            if kind == "VIEW":
                expression.this.set("catalog", None)
                with_ = expression.args.get("with")
                if ctas_expression and with_:
                    # We've already preprocessed the Create expression to bubble up any nested CTEs,
                    # but CREATE VIEW actually requires the WITH clause to come after it so we need
                    # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                    ctas_expression.set("with", with_.pop())

            sql = super().create_sql(expression)

            table = expression.find(exp.Table)

            # Convert CTAS statement to SELECT .. INTO ..
            if kind == "TABLE" and ctas_expression:
                if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                    ctas_expression = ctas_expression.subquery()

                select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
                select_into.set("into", exp.Into(this=table))

                if like_property:
                    # CREATE ... LIKE only copies the schema, so select zero rows.
                    select_into.limit(0, copy=False)

                sql = self.sql(select_into)

            if exists:
                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
                sql_with_ctes = self.prepend_ctes(expression, sql)
                sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
                if kind == "SCHEMA":
                    return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})"""
                elif kind == "TABLE":
                    assert table
                    where = exp.and_(
                        exp.column("table_name").eq(table.name),
                        exp.column("table_schema").eq(table.db) if table.db else None,
                        exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
                    )
                    return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})"""
                elif kind == "INDEX":
                    index = self.sql(exp.Literal.string(expression.this.text("this")))
                    return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
            elif expression.args.get("replace"):
                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

            return self.prepend_ctes(expression, sql)

        def count_sql(self, expression: exp.Count) -> str:
            """Generate COUNT, or COUNT_BIG when the node was parsed from COUNT_BIG."""
            func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
            return rename_func(func_name)(self, expression)

        def offset_sql(self, expression: exp.Offset) -> str:
            # T-SQL requires the ROWS keyword after OFFSET <n>.
            return f"{super().offset_sql(expression)} ROWS"

        def version_sql(self, expression: exp.Version) -> str:
            """Generate a temporal `FOR SYSTEM_TIME ...` clause."""
            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
            this = f"FOR {name}"
            expr = expression.expression
            kind = expression.text("kind")
            if kind in ("FROM", "BETWEEN"):
                # FROM <t1> TO <t2> / BETWEEN <t1> AND <t2> take two boundary expressions.
                args = expr.expressions
                sep = "TO" if kind == "FROM" else "AND"
                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
            else:
                expr_sql = self.sql(expr)

            expr_sql = f" {expr_sql}" if expr_sql else ""
            return f"{this} {kind}{expr_sql}"

        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
            """Generate RETURNS [<table-var> ]<type> for function definitions."""
            table = expression.args.get("table")
            table = f"{table} " if table else ""
            return f"RETURNS {table}{self.sql(expression, 'this')}"

        def returning_sql(self, expression: exp.Returning) -> str:
            """Map RETURNING to T-SQL's OUTPUT [... INTO <target>] clause."""
            into = self.sql(expression, "into")
            into = self.seg(f"INTO {into}") if into else ""
            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

        def transaction_sql(self, expression: exp.Transaction) -> str:
            """Generate BEGIN TRANSACTION [<name>] [WITH MARK <description>]."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            mark = self.sql(expression, "mark")
            mark = f" WITH MARK {mark}" if mark else ""
            return f"BEGIN TRANSACTION{this}{mark}"

        def commit_sql(self, expression: exp.Commit) -> str:
            """Generate COMMIT TRANSACTION [<name>] [WITH (DELAYED_DURABILITY = ON|OFF)]."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            durability = expression.args.get("durability")
            durability = (
                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
                if durability is not None
                else ""
            )
            return f"COMMIT TRANSACTION{this}{durability}"

        def rollback_sql(self, expression: exp.Rollback) -> str:
            """Generate ROLLBACK TRANSACTION [<name>]."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            return f"ROLLBACK TRANSACTION{this}"

        def identifier_sql(self, expression: exp.Identifier) -> str:
            """Prefix temp identifiers with '#' and global temp identifiers with '##'."""
            identifier = super().identifier_sql(expression)

            if expression.args.get("global"):
                identifier = f"##{identifier}"
            elif expression.args.get("temporary"):
                identifier = f"#{identifier}"

            return identifier

        def constraint_sql(self, expression: exp.Constraint) -> str:
            this = self.sql(expression, "this")
            # T-SQL separates constraint expressions with spaces, not commas.
            expressions = self.expressions(expression, flat=True, sep=" ")
            return f"CONSTRAINT {this} {expressions}"

        def length_sql(self, expression: exp.Length) -> str:
            return self._uncast_text(expression, "LEN")

        def right_sql(self, expression: exp.Right) -> str:
            return self._uncast_text(expression, "RIGHT")

        def left_sql(self, expression: exp.Left) -> str:
            return self._uncast_text(expression, "LEFT")

        def _uncast_text(self, expression: exp.Expression, name: str) -> str:
            """Generate `name(...)` while stripping a redundant CAST(... AS TEXT) on the
            first argument (the TEXT cast is added at parse time by _build_with_arg_as_text)."""
            this = expression.this
            if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
                this_sql = self.sql(this, "this")
            else:
                this_sql = self.sql(this)
            expression_sql = self.sql(expression, "expression")
            return self.func(name, this_sql, expression_sql if expression_sql else None)

        def partition_sql(self, expression: exp.Partition) -> str:
            """Render partition specs as the T-SQL `WITH (PARTITIONS(...))` hint."""
            return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

        def alter_sql(self, expression: exp.Alter) -> str:
            """Map ALTER ... RENAME to `EXEC sp_rename`; defer everything else."""
            action = seq_get(expression.args.get("actions") or [], 0)
            if isinstance(action, exp.AlterRename):
                return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
            return super().alter_sql(expression)

        def drop_sql(self, expression: exp.Drop) -> str:
            if expression.args["kind"] == "VIEW":
                # View names cannot be catalog-qualified in T-SQL DROP VIEW.
                expression.this.set("catalog", None)
            return super().drop_sql(expression)

        def declare_sql(self, expression: exp.Declare) -> str:
            return f"DECLARE {self.expressions(expression, flat=True)}"

        def declareitem_sql(self, expression: exp.DeclareItem) -> str:
            """Generate one `@var AS <type>[ = default]` item of a DECLARE statement."""
            variable = self.sql(expression, "this")
            default = self.sql(expression, "default")
            default = f" = {default}" if default else ""

            kind = self.sql(expression, "kind")
            if isinstance(expression.args.get("kind"), exp.Schema):
                # A Schema kind means this is a table variable: DECLARE @t AS TABLE (...).
                kind = f"TABLE {kind}"

            return f"{variable} AS {kind}{default}"

        def options_modifier(self, expression: exp.Expression) -> str:
            options = self.expressions(expression, key="options")
            return f" OPTION{self.wrap(options)}" if options else ""

        def dpipe_sql(self, expression: exp.DPipe) -> str:
            # T-SQL concatenates strings with '+', so fold a || b || c into nested Adds.
            return self.sql(
                reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
            )

        def isascii_sql(self, expression: exp.IsAscii) -> str:
            # The hex literal decodes to the pattern '%[^\x00-\x7f]%'; PATINDEX returning 0
            # (no match under a binary collation) means every character is ASCII.
            return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.

    Only acts on CTE/Subquery nodes that carry a TableAlias without an explicit
    column list; everything else is returned unchanged.

    Args:
        expression: The expression to (possibly) qualify in place.

    Returns:
        The same expression, with aliases added to its inner query's projections.
    """
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        # Adds an Alias wrapper around every unaliased projection.
        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression
Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
384class TSQL(Dialect): 385 SUPPORTS_SEMI_ANTI_JOIN = False 386 LOG_BASE_FIRST = False 387 TYPED_DIVISION = True 388 CONCAT_COALESCE = True 389 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 390 391 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 392 393 TIME_MAPPING = { 394 "year": "%Y", 395 "dayofyear": "%j", 396 "day": "%d", 397 "dy": "%d", 398 "y": "%Y", 399 "week": "%W", 400 "ww": "%W", 401 "wk": "%W", 402 "hour": "%h", 403 "hh": "%I", 404 "minute": "%M", 405 "mi": "%M", 406 "n": "%M", 407 "second": "%S", 408 "ss": "%S", 409 "s": "%-S", 410 "millisecond": "%f", 411 "ms": "%f", 412 "weekday": "%w", 413 "dw": "%w", 414 "month": "%m", 415 "mm": "%M", 416 "m": "%-M", 417 "Y": "%Y", 418 "YYYY": "%Y", 419 "YY": "%y", 420 "MMMM": "%B", 421 "MMM": "%b", 422 "MM": "%m", 423 "M": "%-m", 424 "dddd": "%A", 425 "dd": "%d", 426 "d": "%-d", 427 "HH": "%H", 428 "H": "%-H", 429 "h": "%-I", 430 "ffffff": "%f", 431 "yyyy": "%Y", 432 "yy": "%y", 433 } 434 435 CONVERT_FORMAT_MAPPING = { 436 "0": "%b %d %Y %-I:%M%p", 437 "1": "%m/%d/%y", 438 "2": "%y.%m.%d", 439 "3": "%d/%m/%y", 440 "4": "%d.%m.%y", 441 "5": "%d-%m-%y", 442 "6": "%d %b %y", 443 "7": "%b %d, %y", 444 "8": "%H:%M:%S", 445 "9": "%b %d %Y %-I:%M:%S:%f%p", 446 "10": "mm-dd-yy", 447 "11": "yy/mm/dd", 448 "12": "yymmdd", 449 "13": "%d %b %Y %H:%M:ss:%f", 450 "14": "%H:%M:%S:%f", 451 "20": "%Y-%m-%d %H:%M:%S", 452 "21": "%Y-%m-%d %H:%M:%S.%f", 453 "22": "%m/%d/%y %-I:%M:%S %p", 454 "23": "%Y-%m-%d", 455 "24": "%H:%M:%S", 456 "25": "%Y-%m-%d %H:%M:%S.%f", 457 "100": "%b %d %Y %-I:%M%p", 458 "101": "%m/%d/%Y", 459 "102": "%Y.%m.%d", 460 "103": "%d/%m/%Y", 461 "104": "%d.%m.%Y", 462 "105": "%d-%m-%Y", 463 "106": "%d %b %Y", 464 "107": "%b %d, %Y", 465 "108": "%H:%M:%S", 466 "109": "%b %d %Y %-I:%M:%S:%f%p", 467 "110": "%m-%d-%Y", 468 "111": "%Y/%m/%d", 469 "112": "%Y%m%d", 470 "113": "%d %b %Y %H:%M:%S:%f", 471 "114": "%H:%M:%S:%f", 472 "120": "%Y-%m-%d %H:%M:%S", 473 "121": "%Y-%m-%d %H:%M:%S.%f", 474 } 475 476 
FORMAT_TIME_MAPPING = { 477 "y": "%B %Y", 478 "d": "%m/%d/%Y", 479 "H": "%-H", 480 "h": "%-I", 481 "s": "%Y-%m-%d %H:%M:%S", 482 "D": "%A,%B,%Y", 483 "f": "%A,%B,%Y %-I:%M %p", 484 "F": "%A,%B,%Y %-I:%M:%S %p", 485 "g": "%m/%d/%Y %-I:%M %p", 486 "G": "%m/%d/%Y %-I:%M:%S %p", 487 "M": "%B %-d", 488 "m": "%B %-d", 489 "O": "%Y-%m-%dT%H:%M:%S", 490 "u": "%Y-%M-%D %H:%M:%S%z", 491 "U": "%A, %B %D, %Y %H:%M:%S%z", 492 "T": "%-I:%M:%S %p", 493 "t": "%-I:%M", 494 "Y": "%a %Y", 495 } 496 497 class Tokenizer(tokens.Tokenizer): 498 IDENTIFIERS = [("[", "]"), '"'] 499 QUOTES = ["'", '"'] 500 HEX_STRINGS = [("0x", ""), ("0X", "")] 501 VAR_SINGLE_TOKENS = {"@", "$", "#"} 502 503 KEYWORDS = { 504 **tokens.Tokenizer.KEYWORDS, 505 "CLUSTERED INDEX": TokenType.INDEX, 506 "DATETIME2": TokenType.DATETIME2, 507 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 508 "DECLARE": TokenType.DECLARE, 509 "EXEC": TokenType.COMMAND, 510 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 511 "IMAGE": TokenType.IMAGE, 512 "MONEY": TokenType.MONEY, 513 "NONCLUSTERED INDEX": TokenType.INDEX, 514 "NTEXT": TokenType.TEXT, 515 "OPTION": TokenType.OPTION, 516 "OUTPUT": TokenType.RETURNING, 517 "PRINT": TokenType.COMMAND, 518 "PROC": TokenType.PROCEDURE, 519 "REAL": TokenType.FLOAT, 520 "ROWVERSION": TokenType.ROWVERSION, 521 "SMALLDATETIME": TokenType.SMALLDATETIME, 522 "SMALLMONEY": TokenType.SMALLMONEY, 523 "SQL_VARIANT": TokenType.VARIANT, 524 "SYSTEM_USER": TokenType.CURRENT_USER, 525 "TOP": TokenType.TOP, 526 "TIMESTAMP": TokenType.ROWVERSION, 527 "TINYINT": TokenType.UTINYINT, 528 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 529 "UPDATE STATISTICS": TokenType.COMMAND, 530 "XML": TokenType.XML, 531 } 532 KEYWORDS.pop("/*+") 533 534 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 535 536 class Parser(parser.Parser): 537 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 538 LOG_DEFAULTS_TO_LN = True 539 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 540 STRING_ALIASES = True 541 NO_PAREN_IF_COMMANDS = 
False 542 543 QUERY_MODIFIER_PARSERS = { 544 **parser.Parser.QUERY_MODIFIER_PARSERS, 545 TokenType.OPTION: lambda self: ("options", self._parse_options()), 546 } 547 548 FUNCTIONS = { 549 **parser.Parser.FUNCTIONS, 550 "CHARINDEX": lambda args: exp.StrPosition( 551 this=seq_get(args, 1), 552 substr=seq_get(args, 0), 553 position=seq_get(args, 2), 554 ), 555 "COUNT": lambda args: exp.Count( 556 this=seq_get(args, 0), expressions=args[1:], big_int=False 557 ), 558 "COUNT_BIG": lambda args: exp.Count( 559 this=seq_get(args, 0), expressions=args[1:], big_int=True 560 ), 561 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 562 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 563 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 564 "DATEPART": _build_formatted_time(exp.TimeToStr), 565 "DATETIMEFROMPARTS": _build_datetimefromparts, 566 "EOMONTH": _build_eomonth, 567 "FORMAT": _build_format, 568 "GETDATE": exp.CurrentTimestamp.from_arg_list, 569 "HASHBYTES": _build_hashbytes, 570 "ISNULL": build_coalesce, 571 "JSON_QUERY": _build_json_query, 572 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 573 "LEN": _build_with_arg_as_text(exp.Length), 574 "LEFT": _build_with_arg_as_text(exp.Left), 575 "RIGHT": _build_with_arg_as_text(exp.Right), 576 "PARSENAME": _build_parsename, 577 "REPLICATE": exp.Repeat.from_arg_list, 578 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 579 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 580 "SUSER_NAME": exp.CurrentUser.from_arg_list, 581 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 582 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 583 "TIMEFROMPARTS": _build_timefromparts, 584 "DATETRUNC": _build_datetrunc, 585 } 586 587 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 588 589 PROCEDURE_OPTIONS = dict.fromkeys( 590 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), 
tuple() 591 ) 592 593 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 594 TokenType.TABLE, 595 *parser.Parser.TYPE_TOKENS, 596 } 597 598 STATEMENT_PARSERS = { 599 **parser.Parser.STATEMENT_PARSERS, 600 TokenType.DECLARE: lambda self: self._parse_declare(), 601 } 602 603 RANGE_PARSERS = { 604 **parser.Parser.RANGE_PARSERS, 605 TokenType.DCOLON: lambda self, this: self.expression( 606 exp.ScopeResolution, 607 this=this, 608 expression=self._parse_function() or self._parse_var(any_token=True), 609 ), 610 } 611 612 NO_PAREN_FUNCTION_PARSERS = { 613 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 614 "NEXT": lambda self: self._parse_next_value_for(), 615 } 616 617 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 618 COLUMN_OPERATORS = { 619 **parser.Parser.COLUMN_OPERATORS, 620 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 621 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 622 else self.expression(exp.ScopeResolution, this=this, expression=to), 623 } 624 625 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 626 # We want to use _parse_types() if the first token after :: is a known type, 627 # otherwise we could parse something like x::varchar(max) into a function 628 if self._match_set(self.TYPE_TOKENS, advance=False): 629 return self._parse_types() 630 631 return self._parse_function() or self._parse_types() 632 633 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 634 if not self._match(TokenType.OPTION): 635 return None 636 637 def _parse_option() -> t.Optional[exp.Expression]: 638 option = self._parse_var_from_options(OPTIONS) 639 if not option: 640 return None 641 642 self._match(TokenType.EQ) 643 return self.expression( 644 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 645 ) 646 647 return self._parse_wrapped_csv(_parse_option) 648 649 def _parse_projections(self) -> t.List[exp.Expression]: 650 """ 651 
T-SQL supports the syntax alias = expression in the SELECT's projection list, 652 so we transform all parsed Selects to convert their EQ projections into Aliases. 653 654 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 655 """ 656 return [ 657 ( 658 exp.alias_(projection.expression, projection.this.this, copy=False) 659 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 660 else projection 661 ) 662 for projection in super()._parse_projections() 663 ] 664 665 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 666 """Applies to SQL Server and Azure SQL Database 667 COMMIT [ { TRAN | TRANSACTION } 668 [ transaction_name | @tran_name_variable ] ] 669 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 670 671 ROLLBACK { TRAN | TRANSACTION } 672 [ transaction_name | @tran_name_variable 673 | savepoint_name | @savepoint_variable ] 674 """ 675 rollback = self._prev.token_type == TokenType.ROLLBACK 676 677 self._match_texts(("TRAN", "TRANSACTION")) 678 this = self._parse_id_var() 679 680 if rollback: 681 return self.expression(exp.Rollback, this=this) 682 683 durability = None 684 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 685 self._match_text_seq("DELAYED_DURABILITY") 686 self._match(TokenType.EQ) 687 688 if self._match_text_seq("OFF"): 689 durability = False 690 else: 691 self._match(TokenType.ON) 692 durability = True 693 694 self._match_r_paren() 695 696 return self.expression(exp.Commit, this=this, durability=durability) 697 698 def _parse_transaction(self) -> exp.Transaction | exp.Command: 699 """Applies to SQL Server and Azure SQL Database 700 BEGIN { TRAN | TRANSACTION } 701 [ { transaction_name | @tran_name_variable } 702 [ WITH MARK [ 'description' ] ] 703 ] 704 """ 705 if self._match_texts(("TRAN", "TRANSACTION")): 706 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 707 if self._match_text_seq("WITH", "MARK"): 708 
transaction.set("mark", self._parse_string()) 709 710 return transaction 711 712 return self._parse_as_command(self._prev) 713 714 def _parse_returns(self) -> exp.ReturnsProperty: 715 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 716 returns = super()._parse_returns() 717 returns.set("table", table) 718 return returns 719 720 def _parse_convert( 721 self, strict: bool, safe: t.Optional[bool] = None 722 ) -> t.Optional[exp.Expression]: 723 this = self._parse_types() 724 self._match(TokenType.COMMA) 725 args = [this, *self._parse_csv(self._parse_assignment)] 726 convert = exp.Convert.from_arg_list(args) 727 convert.set("safe", safe) 728 convert.set("strict", strict) 729 return convert 730 731 def _parse_user_defined_function( 732 self, kind: t.Optional[TokenType] = None 733 ) -> t.Optional[exp.Expression]: 734 this = super()._parse_user_defined_function(kind=kind) 735 736 if ( 737 kind == TokenType.FUNCTION 738 or isinstance(this, exp.UserDefinedFunction) 739 or self._match(TokenType.ALIAS, advance=False) 740 ): 741 return this 742 743 if not self._match(TokenType.WITH, advance=False): 744 expressions = self._parse_csv(self._parse_function_parameter) 745 else: 746 expressions = None 747 748 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 749 750 def _parse_id_var( 751 self, 752 any_token: bool = True, 753 tokens: t.Optional[t.Collection[TokenType]] = None, 754 ) -> t.Optional[exp.Expression]: 755 is_temporary = self._match(TokenType.HASH) 756 is_global = is_temporary and self._match(TokenType.HASH) 757 758 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 759 if this: 760 if is_global: 761 this.set("global", True) 762 elif is_temporary: 763 this.set("temporary", True) 764 765 return this 766 767 def _parse_create(self) -> exp.Create | exp.Command: 768 create = super()._parse_create() 769 770 if isinstance(create, exp.Create): 771 table = create.this.this if isinstance(create.this, 
exp.Schema) else create.this 772 if isinstance(table, exp.Table) and table.this.args.get("temporary"): 773 if not create.args.get("properties"): 774 create.set("properties", exp.Properties(expressions=[])) 775 776 create.args["properties"].append("expressions", exp.TemporaryProperty()) 777 778 return create 779 780 def _parse_if(self) -> t.Optional[exp.Expression]: 781 index = self._index 782 783 if self._match_text_seq("OBJECT_ID"): 784 self._parse_wrapped_csv(self._parse_string) 785 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 786 return self._parse_drop(exists=True) 787 self._retreat(index) 788 789 return super()._parse_if() 790 791 def _parse_unique(self) -> exp.UniqueColumnConstraint: 792 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 793 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 794 else: 795 this = self._parse_schema(self._parse_id_var(any_token=False)) 796 797 return self.expression(exp.UniqueColumnConstraint, this=this) 798 799 def _parse_partition(self) -> t.Optional[exp.Partition]: 800 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 801 return None 802 803 def parse_range(): 804 low = self._parse_bitwise() 805 high = self._parse_bitwise() if self._match_text_seq("TO") else None 806 807 return ( 808 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 809 ) 810 811 partition = self.expression( 812 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 813 ) 814 815 self._match_r_paren() 816 817 return partition 818 819 def _parse_declare(self) -> exp.Declare | exp.Command: 820 index = self._index 821 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 822 823 if not expressions or self._curr: 824 self._retreat(index) 825 return self._parse_as_command(self._prev) 826 827 return self.expression(exp.Declare, expressions=expressions) 828 829 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 830 var = 
self._parse_id_var() 831 if not var: 832 return None 833 834 value = None 835 self._match(TokenType.ALIAS) 836 if self._match(TokenType.TABLE): 837 data_type = self._parse_schema() 838 else: 839 data_type = self._parse_types() 840 if self._match(TokenType.EQ): 841 value = self._parse_bitwise() 842 843 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 844 845 class Generator(generator.Generator): 846 LIMIT_IS_TOP = True 847 QUERY_HINTS = False 848 RETURNING_END = False 849 NVL2_SUPPORTED = False 850 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 851 LIMIT_FETCH = "FETCH" 852 COMPUTED_COLUMN_WITH_TYPE = False 853 CTE_RECURSIVE_KEYWORD_REQUIRED = False 854 ENSURE_BOOLS = True 855 NULL_ORDERING_SUPPORTED = None 856 SUPPORTS_SINGLE_ARG_CONCAT = False 857 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 858 SUPPORTS_SELECT_INTO = True 859 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 860 SUPPORTS_TO_NUMBER = False 861 SET_OP_MODIFIERS = False 862 COPY_PARAMS_EQ_REQUIRED = True 863 PARSE_JSON_NAME = None 864 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 865 866 EXPRESSIONS_WITHOUT_NESTED_CTES = { 867 exp.Create, 868 exp.Delete, 869 exp.Insert, 870 exp.Intersect, 871 exp.Except, 872 exp.Merge, 873 exp.Select, 874 exp.Subquery, 875 exp.Union, 876 exp.Update, 877 } 878 879 SUPPORTED_JSON_PATH_PARTS = { 880 exp.JSONPathKey, 881 exp.JSONPathRoot, 882 exp.JSONPathSubscript, 883 } 884 885 TYPE_MAPPING = { 886 **generator.Generator.TYPE_MAPPING, 887 exp.DataType.Type.BOOLEAN: "BIT", 888 exp.DataType.Type.DATETIME2: "DATETIME2", 889 exp.DataType.Type.DECIMAL: "NUMERIC", 890 exp.DataType.Type.DOUBLE: "FLOAT", 891 exp.DataType.Type.INT: "INTEGER", 892 exp.DataType.Type.ROWVERSION: "ROWVERSION", 893 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 894 exp.DataType.Type.TIMESTAMP: "DATETIME2", 895 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 896 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 897 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 898 exp.DataType.Type.UTINYINT: 
"TINYINT", 899 exp.DataType.Type.VARIANT: "SQL_VARIANT", 900 } 901 902 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 903 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 904 905 TRANSFORMS = { 906 **generator.Generator.TRANSFORMS, 907 exp.AnyValue: any_value_to_max_sql, 908 exp.ArrayToString: rename_func("STRING_AGG"), 909 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 910 exp.Chr: rename_func("CHAR"), 911 exp.DateAdd: date_delta_sql("DATEADD"), 912 exp.DateDiff: date_delta_sql("DATEDIFF"), 913 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 914 exp.CurrentDate: rename_func("GETDATE"), 915 exp.CurrentTimestamp: rename_func("GETDATE"), 916 exp.DateStrToDate: datestrtodate_sql, 917 exp.Extract: rename_func("DATEPART"), 918 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 919 exp.GroupConcat: _string_agg_sql, 920 exp.If: rename_func("IIF"), 921 exp.JSONExtract: _json_extract_sql, 922 exp.JSONExtractScalar: _json_extract_sql, 923 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 924 exp.Ln: rename_func("LOG"), 925 exp.Max: max_or_greatest, 926 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 927 exp.Min: min_or_least, 928 exp.NumberToStr: _format_sql, 929 exp.Repeat: rename_func("REPLICATE"), 930 exp.Select: transforms.preprocess( 931 [ 932 transforms.eliminate_distinct_on, 933 transforms.eliminate_semi_and_anti_joins, 934 transforms.eliminate_qualify, 935 transforms.unnest_generate_date_array_using_recursive_cte, 936 ] 937 ), 938 exp.Stddev: rename_func("STDEV"), 939 exp.StrPosition: lambda self, e: self.func( 940 "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position") 941 ), 942 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 943 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 944 exp.SHA2: lambda self, e: self.func( 945 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 946 ), 
947 exp.TemporaryProperty: lambda self, e: "", 948 exp.TimeStrToTime: _timestrtotime_sql, 949 exp.TimeToStr: _format_sql, 950 exp.Trim: trim_sql, 951 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 952 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 953 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 954 exp.DateFromParts: rename_func("DATEFROMPARTS"), 955 } 956 957 TRANSFORMS.pop(exp.ReturnsProperty) 958 959 PROPERTIES_LOCATION = { 960 **generator.Generator.PROPERTIES_LOCATION, 961 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 962 } 963 964 def scope_resolution(self, rhs: str, scope_name: str) -> str: 965 return f"{scope_name}::{rhs}" 966 967 def select_sql(self, expression: exp.Select) -> str: 968 if expression.args.get("offset"): 969 if not expression.args.get("order"): 970 # ORDER BY is required in order to use OFFSET in a query, so we use 971 # a noop order by, since we don't really care about the order. 972 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 973 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 974 975 limit = expression.args.get("limit") 976 if isinstance(limit, exp.Limit): 977 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 978 # we replace here because otherwise TOP would be generated in select_sql 979 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 980 981 return super().select_sql(expression) 982 983 def convert_sql(self, expression: exp.Convert) -> str: 984 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 985 return self.func( 986 name, expression.this, expression.expression, expression.args.get("style") 987 ) 988 989 def queryoption_sql(self, expression: exp.QueryOption) -> str: 990 option = self.sql(expression, "this") 991 value = self.sql(expression, "expression") 992 if value: 993 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 994 return f"{option} 
{optional_equal_sign}{value}" 995 return option 996 997 def lateral_op(self, expression: exp.Lateral) -> str: 998 cross_apply = expression.args.get("cross_apply") 999 if cross_apply is True: 1000 return "CROSS APPLY" 1001 if cross_apply is False: 1002 return "OUTER APPLY" 1003 1004 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1005 self.unsupported("LATERAL clause is not supported.") 1006 return "LATERAL" 1007 1008 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1009 this = expression.this 1010 split_count = len(this.name.split(".")) 1011 delimiter = expression.args.get("delimiter") 1012 part_index = expression.args.get("part_index") 1013 1014 if ( 1015 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1016 or (delimiter and delimiter.name != ".") 1017 or not part_index 1018 or split_count > 4 1019 ): 1020 self.unsupported( 1021 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1022 ) 1023 return "" 1024 1025 return self.func( 1026 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1027 ) 1028 1029 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1030 nano = expression.args.get("nano") 1031 if nano is not None: 1032 nano.pop() 1033 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1034 1035 if expression.args.get("fractions") is None: 1036 expression.set("fractions", exp.Literal.number(0)) 1037 if expression.args.get("precision") is None: 1038 expression.set("precision", exp.Literal.number(0)) 1039 1040 return rename_func("TIMEFROMPARTS")(self, expression) 1041 1042 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1043 zone = expression.args.get("zone") 1044 if zone is not None: 1045 zone.pop() 1046 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1047 1048 nano = expression.args.get("nano") 1049 if nano is not None: 
1050 nano.pop() 1051 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1052 1053 if expression.args.get("milli") is None: 1054 expression.set("milli", exp.Literal.number(0)) 1055 1056 return rename_func("DATETIMEFROMPARTS")(self, expression) 1057 1058 def setitem_sql(self, expression: exp.SetItem) -> str: 1059 this = expression.this 1060 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1061 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1062 return f"{self.sql(this.left)} {self.sql(this.right)}" 1063 1064 return super().setitem_sql(expression) 1065 1066 def boolean_sql(self, expression: exp.Boolean) -> str: 1067 if type(expression.parent) in BIT_TYPES or isinstance( 1068 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1069 ): 1070 return "1" if expression.this else "0" 1071 1072 return "(1 = 1)" if expression.this else "(1 = 0)" 1073 1074 def is_sql(self, expression: exp.Is) -> str: 1075 if isinstance(expression.expression, exp.Boolean): 1076 return self.binary(expression, "=") 1077 return self.binary(expression, "IS") 1078 1079 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1080 sql = self.sql(expression, "this") 1081 properties = expression.args.get("properties") 1082 1083 if sql[:1] != "#" and any( 1084 isinstance(prop, exp.TemporaryProperty) 1085 for prop in (properties.expressions if properties else []) 1086 ): 1087 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1088 1089 return sql 1090 1091 def create_sql(self, expression: exp.Create) -> str: 1092 kind = expression.kind 1093 exists = expression.args.pop("exists", None) 1094 1095 like_property = expression.find(exp.LikeProperty) 1096 if like_property: 1097 ctas_expression = like_property.this 1098 else: 1099 ctas_expression = expression.expression 1100 1101 if kind == "VIEW": 1102 expression.this.set("catalog", None) 1103 with_ = expression.args.get("with") 1104 if 
ctas_expression and with_: 1105 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1106 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1107 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1108 ctas_expression.set("with", with_.pop()) 1109 1110 sql = super().create_sql(expression) 1111 1112 table = expression.find(exp.Table) 1113 1114 # Convert CTAS statement to SELECT .. INTO .. 1115 if kind == "TABLE" and ctas_expression: 1116 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1117 ctas_expression = ctas_expression.subquery() 1118 1119 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1120 select_into.set("into", exp.Into(this=table)) 1121 1122 if like_property: 1123 select_into.limit(0, copy=False) 1124 1125 sql = self.sql(select_into) 1126 1127 if exists: 1128 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1129 sql_with_ctes = self.prepend_ctes(expression, sql) 1130 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1131 if kind == "SCHEMA": 1132 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1133 elif kind == "TABLE": 1134 assert table 1135 where = exp.and_( 1136 exp.column("table_name").eq(table.name), 1137 exp.column("table_schema").eq(table.db) if table.db else None, 1138 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1139 ) 1140 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1141 elif kind == "INDEX": 1142 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1143 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1144 elif expression.args.get("replace"): 1145 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1146 1147 
return self.prepend_ctes(expression, sql) 1148 1149 def count_sql(self, expression: exp.Count) -> str: 1150 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1151 return rename_func(func_name)(self, expression) 1152 1153 def offset_sql(self, expression: exp.Offset) -> str: 1154 return f"{super().offset_sql(expression)} ROWS" 1155 1156 def version_sql(self, expression: exp.Version) -> str: 1157 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1158 this = f"FOR {name}" 1159 expr = expression.expression 1160 kind = expression.text("kind") 1161 if kind in ("FROM", "BETWEEN"): 1162 args = expr.expressions 1163 sep = "TO" if kind == "FROM" else "AND" 1164 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1165 else: 1166 expr_sql = self.sql(expr) 1167 1168 expr_sql = f" {expr_sql}" if expr_sql else "" 1169 return f"{this} {kind}{expr_sql}" 1170 1171 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1172 table = expression.args.get("table") 1173 table = f"{table} " if table else "" 1174 return f"RETURNS {table}{self.sql(expression, 'this')}" 1175 1176 def returning_sql(self, expression: exp.Returning) -> str: 1177 into = self.sql(expression, "into") 1178 into = self.seg(f"INTO {into}") if into else "" 1179 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1180 1181 def transaction_sql(self, expression: exp.Transaction) -> str: 1182 this = self.sql(expression, "this") 1183 this = f" {this}" if this else "" 1184 mark = self.sql(expression, "mark") 1185 mark = f" WITH MARK {mark}" if mark else "" 1186 return f"BEGIN TRANSACTION{this}{mark}" 1187 1188 def commit_sql(self, expression: exp.Commit) -> str: 1189 this = self.sql(expression, "this") 1190 this = f" {this}" if this else "" 1191 durability = expression.args.get("durability") 1192 durability = ( 1193 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1194 if durability is not None 
1195 else "" 1196 ) 1197 return f"COMMIT TRANSACTION{this}{durability}" 1198 1199 def rollback_sql(self, expression: exp.Rollback) -> str: 1200 this = self.sql(expression, "this") 1201 this = f" {this}" if this else "" 1202 return f"ROLLBACK TRANSACTION{this}" 1203 1204 def identifier_sql(self, expression: exp.Identifier) -> str: 1205 identifier = super().identifier_sql(expression) 1206 1207 if expression.args.get("global"): 1208 identifier = f"##{identifier}" 1209 elif expression.args.get("temporary"): 1210 identifier = f"#{identifier}" 1211 1212 return identifier 1213 1214 def constraint_sql(self, expression: exp.Constraint) -> str: 1215 this = self.sql(expression, "this") 1216 expressions = self.expressions(expression, flat=True, sep=" ") 1217 return f"CONSTRAINT {this} {expressions}" 1218 1219 def length_sql(self, expression: exp.Length) -> str: 1220 return self._uncast_text(expression, "LEN") 1221 1222 def right_sql(self, expression: exp.Right) -> str: 1223 return self._uncast_text(expression, "RIGHT") 1224 1225 def left_sql(self, expression: exp.Left) -> str: 1226 return self._uncast_text(expression, "LEFT") 1227 1228 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1229 this = expression.this 1230 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1231 this_sql = self.sql(this, "this") 1232 else: 1233 this_sql = self.sql(this) 1234 expression_sql = self.sql(expression, "expression") 1235 return self.func(name, this_sql, expression_sql if expression_sql else None) 1236 1237 def partition_sql(self, expression: exp.Partition) -> str: 1238 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1239 1240 def alter_sql(self, expression: exp.Alter) -> str: 1241 action = seq_get(expression.args.get("actions") or [], 0) 1242 if isinstance(action, exp.AlterRename): 1243 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1244 return super().alter_sql(expression) 1245 1246 def 
drop_sql(self, expression: exp.Drop) -> str: 1247 if expression.args["kind"] == "VIEW": 1248 expression.this.set("catalog", None) 1249 return super().drop_sql(expression) 1250 1251 def declare_sql(self, expression: exp.Declare) -> str: 1252 return f"DECLARE {self.expressions(expression, flat=True)}" 1253 1254 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1255 variable = self.sql(expression, "this") 1256 default = self.sql(expression, "default") 1257 default = f" = {default}" if default else "" 1258 1259 kind = self.sql(expression, "kind") 1260 if isinstance(expression.args.get("kind"), exp.Schema): 1261 kind = f"TABLE {kind}" 1262 1263 return f"{variable} AS {kind}{default}" 1264 1265 def options_modifier(self, expression: exp.Expression) -> str: 1266 options = self.expressions(expression, key="options") 1267 return f" OPTION{self.wrap(options)}" if options else "" 1268 1269 def dpipe_sql(self, expression: exp.DPipe) -> str: 1270 return self.sql( 1271 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1272 ) 1273 1274 def isascii_sql(self, expression: exp.IsAscii) -> str: 1275 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"
Whether the base comes first in the `LOG` function. Possible values: `True`, `False`, `None` (`LOG` does not support two arguments).
Whether the behavior of `a / b` depends on the types of `a` and `b`. `False` means `a / b` is always float division. `True` means `a / b` is integer division when both `a` and `b` are integers.
A `NULL` argument in `CONCAT` yields `NULL` by default, but in some dialects it yields an empty string.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python strftime
formats.
Inherited Members
- sqlglot.dialects.dialect.Dialect
- Dialect
- INDEX_OFFSET
- WEEK_OFFSET
- UNNEST_COLUMN_ONLY
- ALIAS_POST_TABLESAMPLE
- TABLESAMPLE_SIZE_IS_PERCENT
- IDENTIFIERS_CAN_START_WITH_DIGIT
- DPIPE_IS_STRING_CONCAT
- STRICT_STRING_CONCAT
- SUPPORTS_USER_DEFINED_TYPES
- COPY_PARAMS_ARE_CSV
- NORMALIZE_FUNCTIONS
- PRESERVE_ORIGINAL_NAMES
- NULL_ORDERING
- SAFE_DIVISION
- HEX_LOWERCASE
- DATE_FORMAT
- DATEINT_FORMAT
- FORMAT_MAPPING
- UNESCAPED_SEQUENCES
- PSEUDOCOLUMNS
- PREFER_CTE_ALIAS_COLUMN
- FORCE_EARLY_ALIAS_REF_EXPANSION
- EXPAND_ALIAS_REFS_EARLY_ONLY_IN_GROUP_BY
- SUPPORTS_ORDER_BY_ALL
- HAS_DISTINCT_ARRAY_CONSTRUCTORS
- SUPPORTS_FIXED_SIZE_ARRAYS
- STRICT_JSON_PATH_SYNTAX
- ON_CONDITION_EMPTY_BEFORE_ERROR
- ARRAY_AGG_INCLUDES_NULLS
- PROMOTE_TO_INFERRED_DATETIME_TYPE
- SUPPORTS_VALUES_DEFAULT
- NUMBERS_CAN_BE_UNDERSCORE_SEPARATED
- REGEXP_EXTRACT_DEFAULT_GROUP
- SET_OP_DISTINCT_BY_DEFAULT
- CREATABLE_KIND_MAPPING
- DATE_PART_MAPPING
- TYPE_TO_EXPRESSIONS
- ANNOTATORS
- get_or_raise
- format_time
- settings
- normalize_identifier
- case_sensitive
- can_identify
- quote_identifier
- to_json_path
- parse
- parse_into
- generate
- transpile
- tokenize
- tokenizer
- jsonpath_tokenizer
- parser
- generator
class Tokenizer(tokens.Tokenizer):
    """Tokenizer handling T-SQL's lexical peculiarities."""

    # Identifiers may be bracket-quoted ([name]) or double-quoted.
    IDENTIFIERS = [("[", "]"), '"']
    QUOTES = ["'", '"']
    # Hex literals look like 0xDEADBEEF, with no terminating delimiter.
    HEX_STRINGS = [("0x", ""), ("0X", "")]
    # @variables, $placeholders and #temp-object names are single-token vars.
    VAR_SINGLE_TOKENS = {"@", "$", "#"}

    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
        "CLUSTERED INDEX": TokenType.INDEX,
        "DATETIME2": TokenType.DATETIME2,
        "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
        "DECLARE": TokenType.DECLARE,
        "EXEC": TokenType.COMMAND,
        "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
        "IMAGE": TokenType.IMAGE,
        "MONEY": TokenType.MONEY,
        "NONCLUSTERED INDEX": TokenType.INDEX,
        "NTEXT": TokenType.TEXT,
        "OPTION": TokenType.OPTION,
        "OUTPUT": TokenType.RETURNING,
        "PRINT": TokenType.COMMAND,
        "PROC": TokenType.PROCEDURE,
        "REAL": TokenType.FLOAT,
        "ROWVERSION": TokenType.ROWVERSION,
        "SMALLDATETIME": TokenType.SMALLDATETIME,
        "SMALLMONEY": TokenType.SMALLMONEY,
        "SQL_VARIANT": TokenType.VARIANT,
        "SYSTEM_USER": TokenType.CURRENT_USER,
        "TOP": TokenType.TOP,
        "TIMESTAMP": TokenType.ROWVERSION,
        "TINYINT": TokenType.UTINYINT,
        "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER,
        "UPDATE STATISTICS": TokenType.COMMAND,
        "XML": TokenType.XML,
    }
    # T-SQL does not use Oracle-style /*+ ... */ hint comments.
    del KEYWORDS["/*+"]

    COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- use_rs_tokenizer
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
class Parser(parser.Parser):
    """Parser for T-SQL (SQL Server / Azure SQL Database) syntax."""

    SET_REQUIRES_ASSIGNMENT_DELIMITER = False
    LOG_DEFAULTS_TO_LN = True
    ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
    STRING_ALIASES = True
    NO_PAREN_IF_COMMANDS = False

    QUERY_MODIFIER_PARSERS = {
        **parser.Parser.QUERY_MODIFIER_PARSERS,
        TokenType.OPTION: lambda self: ("options", self._parse_options()),
    }

    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,
        # CHARINDEX(substr, string[, start]) -> StrPosition(string, substr, start)
        "CHARINDEX": lambda args: exp.StrPosition(
            this=seq_get(args, 1),
            substr=seq_get(args, 0),
            position=seq_get(args, 2),
        ),
        "COUNT": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=False
        ),
        "COUNT_BIG": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=True
        ),
        "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
        "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
        "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
        "DATEPART": _build_formatted_time(exp.TimeToStr),
        "DATETIMEFROMPARTS": _build_datetimefromparts,
        "EOMONTH": _build_eomonth,
        "FORMAT": _build_format,
        "GETDATE": exp.CurrentTimestamp.from_arg_list,
        "HASHBYTES": _build_hashbytes,
        "ISNULL": build_coalesce,
        "JSON_QUERY": _build_json_query,
        "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
        "LEN": _build_with_arg_as_text(exp.Length),
        "LEFT": _build_with_arg_as_text(exp.Left),
        "RIGHT": _build_with_arg_as_text(exp.Right),
        "PARSENAME": _build_parsename,
        "REPLICATE": exp.Repeat.from_arg_list,
        "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
        "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
        "SUSER_NAME": exp.CurrentUser.from_arg_list,
        "SUSER_SNAME": exp.CurrentUser.from_arg_list,
        "SYSTEM_USER": exp.CurrentUser.from_arg_list,
        "TIMEFROMPARTS": _build_timefromparts,
        "DATETRUNC": _build_datetrunc,
    }

    JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

    PROCEDURE_OPTIONS = dict.fromkeys(
        ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple()
    )

    RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
        TokenType.TABLE,
        *parser.Parser.TYPE_TOKENS,
    }

    STATEMENT_PARSERS = {
        **parser.Parser.STATEMENT_PARSERS,
        TokenType.DECLARE: lambda self: self._parse_declare(),
    }

    RANGE_PARSERS = {
        **parser.Parser.RANGE_PARSERS,
        TokenType.DCOLON: lambda self, this: self.expression(
            exp.ScopeResolution,
            this=this,
            expression=self._parse_function() or self._parse_var(any_token=True),
        ),
    }

    NO_PAREN_FUNCTION_PARSERS = {
        **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
        "NEXT": lambda self: self._parse_next_value_for(),
    }

    # In T-SQL, DCOLON (::) is a scope-resolution operator (exp.ScopeResolution),
    # except when the RHS is a known non-user-defined data type (then it's a cast).
    COLUMN_OPERATORS = {
        **parser.Parser.COLUMN_OPERATORS,
        TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
        if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
        else self.expression(exp.ScopeResolution, this=this, expression=to),
    }

    def _parse_dcolon(self) -> t.Optional[exp.Expression]:
        # Prefer _parse_types() when the token right after :: is a known type;
        # otherwise something like x::varchar(max) could parse as a function call.
        if self._match_set(self.TYPE_TOKENS, advance=False):
            return self._parse_types()

        return self._parse_function() or self._parse_types()

    def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
        """Parse the OPTION(...) query-hint clause, if present."""
        if not self._match(TokenType.OPTION):
            return None

        def parse_single_option() -> t.Optional[exp.Expression]:
            name = self._parse_var_from_options(OPTIONS)
            if not name:
                return None

            self._match(TokenType.EQ)
            return self.expression(
                exp.QueryOption, this=name, expression=self._parse_primary_or_var()
            )

        return self._parse_wrapped_csv(parse_single_option)

    def _parse_projections(self) -> t.List[exp.Expression]:
        """
        T-SQL supports the syntax alias = expression in the SELECT's projection list,
        so we transform all parsed Selects to convert their EQ projections into Aliases.

        See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
        """
        projections = []
        for proj in super()._parse_projections():
            if isinstance(proj, exp.EQ) and isinstance(proj.this, exp.Column):
                proj = exp.alias_(proj.expression, proj.this.this, copy=False)
            projections.append(proj)

        return projections

    def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
        """Applies to SQL Server and Azure SQL Database
        COMMIT [ { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable ] ]
            [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

        ROLLBACK { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable
            | savepoint_name | @savepoint_variable ]
        """
        is_rollback = self._prev.token_type == TokenType.ROLLBACK

        self._match_texts(("TRAN", "TRANSACTION"))
        name = self._parse_id_var()

        if is_rollback:
            return self.expression(exp.Rollback, this=name)

        durability = None
        if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
            self._match_text_seq("DELAYED_DURABILITY")
            self._match(TokenType.EQ)

            if self._match_text_seq("OFF"):
                durability = False
            else:
                self._match(TokenType.ON)
                durability = True

            self._match_r_paren()

        return self.expression(exp.Commit, this=name, durability=durability)

    def _parse_transaction(self) -> exp.Transaction | exp.Command:
        """Applies to SQL Server and Azure SQL Database
        BEGIN { TRAN | TRANSACTION }
        [ { transaction_name | @tran_name_variable }
        [ WITH MARK [ 'description' ] ]
        ]
        """
        if not self._match_texts(("TRAN", "TRANSACTION")):
            return self._parse_as_command(self._prev)

        txn = self.expression(exp.Transaction, this=self._parse_id_var())
        if self._match_text_seq("WITH", "MARK"):
            txn.set("mark", self._parse_string())

        return txn

    def _parse_returns(self) -> exp.ReturnsProperty:
        # A RETURNS clause may name a table variable before the actual type.
        table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
        returns = super()._parse_returns()
        returns.set("table", table)
        return returns

    def _parse_convert(
        self, strict: bool, safe: t.Optional[bool] = None
    ) -> t.Optional[exp.Expression]:
        # CONVERT(type, expr[, style]): the target type comes first.
        target_type = self._parse_types()
        self._match(TokenType.COMMA)
        convert = exp.Convert.from_arg_list(
            [target_type, *self._parse_csv(self._parse_assignment)]
        )
        convert.set("safe", safe)
        convert.set("strict", strict)
        return convert

    def _parse_user_defined_function(
        self, kind: t.Optional[TokenType] = None
    ) -> t.Optional[exp.Expression]:
        this = super()._parse_user_defined_function(kind=kind)

        if (
            kind == TokenType.FUNCTION
            or isinstance(this, exp.UserDefinedFunction)
            or self._match(TokenType.ALIAS, advance=False)
        ):
            return this

        # Procedures may list their parameters without surrounding parentheses.
        if self._match(TokenType.WITH, advance=False):
            expressions = None
        else:
            expressions = self._parse_csv(self._parse_function_parameter)

        return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

    def _parse_id_var(
        self,
        any_token: bool = True,
        tokens: t.Optional[t.Collection[TokenType]] = None,
    ) -> t.Optional[exp.Expression]:
        # A single leading # marks a local temp object, a double ## a global one.
        is_temporary = self._match(TokenType.HASH)
        is_global = is_temporary and self._match(TokenType.HASH)

        identifier = super()._parse_id_var(any_token=any_token, tokens=tokens)
        if identifier:
            if is_global:
                identifier.set("global", True)
            elif is_temporary:
                identifier.set("temporary", True)

        return identifier

    def _parse_create(self) -> exp.Create | exp.Command:
        create = super()._parse_create()

        if isinstance(create, exp.Create):
            # Propagate a #temp table name into a TemporaryProperty on the Create node.
            table = create.this.this if isinstance(create.this, exp.Schema) else create.this
            if isinstance(table, exp.Table) and table.this.args.get("temporary"):
                if not create.args.get("properties"):
                    create.set("properties", exp.Properties(expressions=[]))

                create.args["properties"].append("expressions", exp.TemporaryProperty())

        return create

    def _parse_if(self) -> t.Optional[exp.Expression]:
        # Recognize the idiom IF OBJECT_ID('...') IS NOT NULL DROP ... as DROP IF EXISTS.
        start = self._index

        if self._match_text_seq("OBJECT_ID"):
            self._parse_wrapped_csv(self._parse_string)
            if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                return self._parse_drop(exists=True)
        self._retreat(start)

        return super()._parse_if()

    def _parse_unique(self) -> exp.UniqueColumnConstraint:
        if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
            constraint = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
        else:
            constraint = self._parse_schema(self._parse_id_var(any_token=False))

        return self.expression(exp.UniqueColumnConstraint, this=constraint)

    def _parse_partition(self) -> t.Optional[exp.Partition]:
        if not self._match_text_seq("WITH", "(", "PARTITIONS"):
            return None

        def parse_bound_or_range():
            low = self._parse_bitwise()
            high = self._parse_bitwise() if self._match_text_seq("TO") else None

            if high:
                return self.expression(exp.PartitionRange, this=low, expression=high)
            return low

        partition = self.expression(
            exp.Partition, expressions=self._parse_wrapped_csv(parse_bound_or_range)
        )

        self._match_r_paren()

        return partition

    def _parse_declare(self) -> exp.Declare | exp.Command:
        start = self._index
        items = self._try_parse(partial(self._parse_csv, self._parse_declareitem))

        # Fall back to a raw command if nothing parsed or tokens remain unconsumed.
        if not items or self._curr:
            self._retreat(start)
            return self._parse_as_command(self._prev)

        return self.expression(exp.Declare, expressions=items)

    def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
        var = self._parse_id_var()
        if not var:
            return None

        value = None
        self._match(TokenType.ALIAS)
        if self._match(TokenType.TABLE):
            data_type = self._parse_schema()
        else:
            data_type = self._parse_types()
            if self._match(TokenType.EQ):
                value = self._parse_bitwise()

        return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- INTERVAL_VARS
- ALIAS_TOKENS
- ARRAY_CONSTRUCTORS
- COMMENT_TABLE_ALIAS_TOKENS
- UPDATE_ALIAS_TOKENS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- FUNCTION_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- NULL_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ODBC_DATETIME_LITERALS
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- ANALYZE_STYLES
- ANALYZE_EXPRESSION_PARSERS
- PARTITION_KEYWORDS
- AMBIGUOUS_ALIAS_TOKENS
- OPERATION_MODIFIERS
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- SET_OP_MODIFIERS
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- OPTIONAL_ALIAS_TOKEN_CTE
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- errors
- sql
845 class Generator(generator.Generator): 846 LIMIT_IS_TOP = True 847 QUERY_HINTS = False 848 RETURNING_END = False 849 NVL2_SUPPORTED = False 850 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 851 LIMIT_FETCH = "FETCH" 852 COMPUTED_COLUMN_WITH_TYPE = False 853 CTE_RECURSIVE_KEYWORD_REQUIRED = False 854 ENSURE_BOOLS = True 855 NULL_ORDERING_SUPPORTED = None 856 SUPPORTS_SINGLE_ARG_CONCAT = False 857 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 858 SUPPORTS_SELECT_INTO = True 859 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 860 SUPPORTS_TO_NUMBER = False 861 SET_OP_MODIFIERS = False 862 COPY_PARAMS_EQ_REQUIRED = True 863 PARSE_JSON_NAME = None 864 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 865 866 EXPRESSIONS_WITHOUT_NESTED_CTES = { 867 exp.Create, 868 exp.Delete, 869 exp.Insert, 870 exp.Intersect, 871 exp.Except, 872 exp.Merge, 873 exp.Select, 874 exp.Subquery, 875 exp.Union, 876 exp.Update, 877 } 878 879 SUPPORTED_JSON_PATH_PARTS = { 880 exp.JSONPathKey, 881 exp.JSONPathRoot, 882 exp.JSONPathSubscript, 883 } 884 885 TYPE_MAPPING = { 886 **generator.Generator.TYPE_MAPPING, 887 exp.DataType.Type.BOOLEAN: "BIT", 888 exp.DataType.Type.DATETIME2: "DATETIME2", 889 exp.DataType.Type.DECIMAL: "NUMERIC", 890 exp.DataType.Type.DOUBLE: "FLOAT", 891 exp.DataType.Type.INT: "INTEGER", 892 exp.DataType.Type.ROWVERSION: "ROWVERSION", 893 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 894 exp.DataType.Type.TIMESTAMP: "DATETIME2", 895 exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2", 896 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 897 exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME", 898 exp.DataType.Type.UTINYINT: "TINYINT", 899 exp.DataType.Type.VARIANT: "SQL_VARIANT", 900 } 901 902 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 903 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 904 905 TRANSFORMS = { 906 **generator.Generator.TRANSFORMS, 907 exp.AnyValue: any_value_to_max_sql, 908 exp.ArrayToString: rename_func("STRING_AGG"), 909 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 910 exp.Chr: 
rename_func("CHAR"), 911 exp.DateAdd: date_delta_sql("DATEADD"), 912 exp.DateDiff: date_delta_sql("DATEDIFF"), 913 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 914 exp.CurrentDate: rename_func("GETDATE"), 915 exp.CurrentTimestamp: rename_func("GETDATE"), 916 exp.DateStrToDate: datestrtodate_sql, 917 exp.Extract: rename_func("DATEPART"), 918 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 919 exp.GroupConcat: _string_agg_sql, 920 exp.If: rename_func("IIF"), 921 exp.JSONExtract: _json_extract_sql, 922 exp.JSONExtractScalar: _json_extract_sql, 923 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 924 exp.Ln: rename_func("LOG"), 925 exp.Max: max_or_greatest, 926 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 927 exp.Min: min_or_least, 928 exp.NumberToStr: _format_sql, 929 exp.Repeat: rename_func("REPLICATE"), 930 exp.Select: transforms.preprocess( 931 [ 932 transforms.eliminate_distinct_on, 933 transforms.eliminate_semi_and_anti_joins, 934 transforms.eliminate_qualify, 935 transforms.unnest_generate_date_array_using_recursive_cte, 936 ] 937 ), 938 exp.Stddev: rename_func("STDEV"), 939 exp.StrPosition: lambda self, e: self.func( 940 "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position") 941 ), 942 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 943 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 944 exp.SHA2: lambda self, e: self.func( 945 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 946 ), 947 exp.TemporaryProperty: lambda self, e: "", 948 exp.TimeStrToTime: _timestrtotime_sql, 949 exp.TimeToStr: _format_sql, 950 exp.Trim: trim_sql, 951 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 952 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 953 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 954 exp.DateFromParts: rename_func("DATEFROMPARTS"), 955 
}

# RETURNS is rendered by returnsproperty_sql below, not via the generic transform.
TRANSFORMS.pop(exp.ReturnsProperty)

PROPERTIES_LOCATION = {
    **generator.Generator.PROPERTIES_LOCATION,
    # T-SQL has no VOLATILE table property.
    exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
}

def scope_resolution(self, rhs: str, scope_name: str) -> str:
    """Render T-SQL scope resolution, e.g. ``hierarchyid::GetRoot``."""
    return f"{scope_name}::{rhs}"

def select_sql(self, expression: exp.Select) -> str:
    """Generate SELECT, adapting LIMIT/OFFSET to T-SQL's ORDER BY ... FETCH form."""
    if expression.args.get("offset"):
        if not expression.args.get("order"):
            # ORDER BY is required in order to use OFFSET in a query, so we use
            # a noop order by, since we don't really care about the order.
            # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
            expression.order_by(exp.select(exp.null()).subquery(), copy=False)

        limit = expression.args.get("limit")
        if isinstance(limit, exp.Limit):
            # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
            # we replace here because otherwise TOP would be generated in select_sql
            limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

    return super().select_sql(expression)

def convert_sql(self, expression: exp.Convert) -> str:
    """Render CONVERT / TRY_CONVERT with the optional style argument."""
    name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
    return self.func(
        name, expression.this, expression.expression, expression.args.get("style")
    )

def queryoption_sql(self, expression: exp.QueryOption) -> str:
    """Render one entry of an OPTION(...) hint, adding '=' where required."""
    option = self.sql(expression, "this")
    value = self.sql(expression, "expression")
    if value:
        optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
        return f"{option} {optional_equal_sign}{value}"
    return option

def lateral_op(self, expression: exp.Lateral) -> str:
    """Return the join operator for a lateral expression (CROSS/OUTER APPLY)."""
    cross_apply = expression.args.get("cross_apply")
    if cross_apply is True:
        return "CROSS APPLY"
    if cross_apply is False:
        return "OUTER APPLY"

    # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
    self.unsupported("LATERAL clause is not supported.")
    return "LATERAL"

def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
    """Transpile SPLIT_PART to PARSENAME when the delimiter is '.' and all args are literals."""
    this = expression.this
    split_count = len(this.name.split("."))
    delimiter = expression.args.get("delimiter")
    part_index = expression.args.get("part_index")

    if (
        not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
        or (delimiter and delimiter.name != ".")
        or not part_index
        or split_count > 4
    ):
        self.unsupported(
            "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
        )
        return ""

    # PARSENAME counts parts right-to-left, hence the index inversion.
    return self.func(
        "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
    )

def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
    """Render TIMEFROMPARTS, dropping unsupported nanoseconds and defaulting fractions/precision to 0."""
    nano = expression.args.get("nano")
    if nano is not None:
        nano.pop()
        self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

    if expression.args.get("fractions") is None:
        expression.set("fractions", exp.Literal.number(0))
    if expression.args.get("precision") is None:
        expression.set("precision", exp.Literal.number(0))

    return rename_func("TIMEFROMPARTS")(self, expression)

def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
    """Render DATETIMEFROMPARTS, dropping unsupported zone/nano args and defaulting milli to 0."""
    zone = expression.args.get("zone")
    if zone is not None:
        zone.pop()
        self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

    nano = expression.args.get("nano")
    if nano is not None:
        nano.pop()
        self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

    if expression.args.get("milli") is None:
        expression.set("milli", exp.Literal.number(0))

    return rename_func("DATETIMEFROMPARTS")(self, expression)

def setitem_sql(self, expression: exp.SetItem) -> str:
    """Render a SET item; T-SQL omits '=' unless the LHS is a variable."""
    this = expression.this
    if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
        # T-SQL does not use '=' in SET command, except when the LHS is a variable.
        return f"{self.sql(this.left)} {self.sql(this.right)}"

    return super().setitem_sql(expression)

def boolean_sql(self, expression: exp.Boolean) -> str:
    """Render booleans as bits (1/0) in bit contexts, else as comparisons."""
    if type(expression.parent) in BIT_TYPES or isinstance(
        expression.find_ancestor(exp.Values, exp.Select), exp.Values
    ):
        return "1" if expression.this else "0"

    return "(1 = 1)" if expression.this else "(1 = 0)"

def is_sql(self, expression: exp.Is) -> str:
    """Render IS against a boolean as '=' (there is no boolean type in T-SQL)."""
    if isinstance(expression.expression, exp.Boolean):
        return self.binary(expression, "=")
    return self.binary(expression, "IS")

def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    """Prefix the created object's name with '#' when it carries a TemporaryProperty."""
    sql = self.sql(expression, "this")
    properties = expression.args.get("properties")

    if sql[:1] != "#" and any(
        isinstance(prop, exp.TemporaryProperty)
        for prop in (properties.expressions if properties else [])
    ):
        # Insert the '#' inside the brackets if the name is quoted as [name].
        sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

    return sql

def create_sql(self, expression: exp.Create) -> str:
    """Generate CREATE, rewriting CTAS to SELECT ... INTO and IF NOT EXISTS to guarded EXEC."""
    kind = expression.kind
    exists = expression.args.pop("exists", None)

    like_property = expression.find(exp.LikeProperty)
    if like_property:
        ctas_expression = like_property.this
    else:
        ctas_expression = expression.expression

    if kind == "VIEW":
        expression.this.set("catalog", None)
        with_ = expression.args.get("with")
        if ctas_expression and with_:
            # We've already preprocessed the Create expression to bubble up any nested CTEs,
            # but CREATE VIEW actually requires the WITH clause to come after it so we need
            # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
            ctas_expression.set("with", with_.pop())

    sql = super().create_sql(expression)

    table = expression.find(exp.Table)

    # Convert CTAS statement to SELECT .. INTO ..
    if kind == "TABLE" and ctas_expression:
        if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
            ctas_expression = ctas_expression.subquery()

        select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
        select_into.set("into", exp.Into(this=table))

        if like_property:
            # CREATE TABLE ... LIKE copies only the schema, so select zero rows.
            select_into.limit(0, copy=False)

        sql = self.sql(select_into)

    if exists:
        identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
        sql_with_ctes = self.prepend_ctes(expression, sql)
        sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
        if kind == "SCHEMA":
            return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})"""
        elif kind == "TABLE":
            assert table
            where = exp.and_(
                exp.column("table_name").eq(table.name),
                exp.column("table_schema").eq(table.db) if table.db else None,
                exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
            )
            return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})"""
        elif kind == "INDEX":
            index = self.sql(exp.Literal.string(expression.this.text("this")))
            return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
    elif expression.args.get("replace"):
        sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

    return self.prepend_ctes(expression, sql)

def count_sql(self, expression: exp.Count) -> str:
    """Render COUNT, or COUNT_BIG when a bigint count was requested."""
    func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
    return rename_func(func_name)(self, expression)

def offset_sql(self, expression: exp.Offset) -> str:
    """Render OFFSET with the mandatory trailing ROWS keyword."""
    return f"{super().offset_sql(expression)} ROWS"

def version_sql(self, expression: exp.Version) -> str:
    """Render a temporal clause, e.g. FOR SYSTEM_TIME AS OF / FROM ... TO / BETWEEN ... AND."""
    name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
    this = f"FOR {name}"
    expr = expression.expression
    kind = expression.text("kind")
    if kind in ("FROM", "BETWEEN"):
        # These kinds take two endpoints joined by a kind-specific separator.
        args = expr.expressions
        sep = "TO" if kind == "FROM" else "AND"
        expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
    else:
        expr_sql = self.sql(expr)

    expr_sql = f" {expr_sql}" if expr_sql else ""
    return f"{this} {kind}{expr_sql}"

def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
    """Render the RETURNS clause of a function definition."""
    table = expression.args.get("table")
    table = f"{table} " if table else ""
    return f"RETURNS {table}{self.sql(expression, 'this')}"

def returning_sql(self, expression: exp.Returning) -> str:
    """Render RETURNING as T-SQL's OUTPUT [... INTO ...] clause."""
    into = self.sql(expression, "into")
    into = self.seg(f"INTO {into}") if into else ""
    return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

def transaction_sql(self, expression: exp.Transaction) -> str:
    """Render BEGIN TRANSACTION with optional name and WITH MARK."""
    this = self.sql(expression, "this")
    this = f" {this}" if this else ""
    mark = self.sql(expression, "mark")
    mark = f" WITH MARK {mark}" if mark else ""
    return f"BEGIN TRANSACTION{this}{mark}"

def commit_sql(self, expression: exp.Commit) -> str:
    """Render COMMIT TRANSACTION with an optional DELAYED_DURABILITY hint."""
    this = self.sql(expression, "this")
    this = f" {this}" if this else ""
    durability = expression.args.get("durability")
    durability = (
        f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
        if durability is not None
        else ""
    )
    return f"COMMIT TRANSACTION{this}{durability}"

def rollback_sql(self, expression: exp.Rollback) -> str:
    """Render ROLLBACK TRANSACTION with an optional transaction name."""
    this = self.sql(expression, "this")
    this = f" {this}" if this else ""
    return f"ROLLBACK TRANSACTION{this}"

def identifier_sql(self, expression: exp.Identifier) -> str:
    """Render an identifier, adding ##/# prefixes for global/local temporary objects."""
    identifier = super().identifier_sql(expression)

    if expression.args.get("global"):
        identifier = f"##{identifier}"
    elif expression.args.get("temporary"):
        identifier = f"#{identifier}"

    return identifier

def constraint_sql(self, expression: exp.Constraint) -> str:
    """Render a named constraint with space-separated (not comma-separated) parts."""
    this = self.sql(expression, "this")
    expressions = self.expressions(expression, flat=True, sep=" ")
    return f"CONSTRAINT {this} {expressions}"

def length_sql(self, expression: exp.Length) -> str:
    """Render LENGTH as LEN, unwrapping a cast to TEXT on the argument."""
    return self._uncast_text(expression, "LEN")

def right_sql(self, expression: exp.Right) -> str:
    """Render RIGHT, unwrapping a cast to TEXT on the argument."""
    return self._uncast_text(expression, "RIGHT")

def left_sql(self, expression: exp.Left) -> str:
    """Render LEFT, unwrapping a cast to TEXT on the argument."""
    return self._uncast_text(expression, "LEFT")

def _uncast_text(self, expression: exp.Expression, name: str) -> str:
    """Emit ``name(...)`` dropping a CAST(... AS TEXT) wrapper around the first argument."""
    this = expression.this
    if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
        this_sql = self.sql(this, "this")
    else:
        this_sql = self.sql(this)
    expression_sql = self.sql(expression, "expression")
    return self.func(name, this_sql, expression_sql if expression_sql else None)

def partition_sql(self, expression: exp.Partition) -> str:
    """Render a partition spec as WITH (PARTITIONS(...))."""
    return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

def alter_sql(self, expression: exp.Alter) -> str:
    """Render ALTER; table renames become EXEC sp_rename calls."""
    action = seq_get(expression.args.get("actions") or [], 0)
    if isinstance(action, exp.AlterRename):
        return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
    return super().alter_sql(expression)

def drop_sql(self, expression: exp.Drop) -> str:
    """Render DROP; views cannot be qualified with a catalog, so strip it."""
    if expression.args["kind"] == "VIEW":
        expression.this.set("catalog", None)
    return super().drop_sql(expression)

def declare_sql(self, expression: exp.Declare) -> str:
    """Render a DECLARE statement with its comma-separated items."""
    return f"DECLARE {self.expressions(expression, flat=True)}"

def declareitem_sql(self, expression: exp.DeclareItem) -> str:
    """Render one DECLARE entry: <var> AS <type>[ = <default>]."""
    variable = self.sql(expression, "this")
    default = self.sql(expression, "default")
    default = f" = {default}" if default else ""

    kind = self.sql(expression, "kind")
    if isinstance(expression.args.get("kind"), exp.Schema):
        # A Schema kind denotes a table variable: DECLARE @t AS TABLE (...).
        kind = f"TABLE {kind}"

    return f"{variable} AS {kind}{default}"

def options_modifier(self, expression: exp.Expression) -> str:
    """Render the trailing OPTION(...) query-hint clause, if any options exist."""
    options = self.expressions(expression, key="options")
    return f" OPTION{self.wrap(options)}" if options else ""

def dpipe_sql(self, expression: exp.DPipe) -> str:
    """T-SQL has no || operator; fold the concatenation chain into nested '+' (exp.Add)."""
    return self.sql(
        reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
    )

def isascii_sql(self, expression: exp.IsAscii) -> str:
    # The hex literal decodes to the PATINDEX pattern '%[^\x00-\x7f]%', i.e. any non-ASCII char.
    return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
- normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default: ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3.
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
def select_sql(self, expression: exp.Select) -> str:
    """Generate SELECT, adapting LIMIT/OFFSET to T-SQL's ORDER BY ... FETCH form."""
    if expression.args.get("offset"):
        if not expression.args.get("order"):
            # ORDER BY is required in order to use OFFSET in a query, so we use
            # a noop order by, since we don't really care about the order.
            # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
            expression.order_by(exp.select(exp.null()).subquery(), copy=False)

        limit = expression.args.get("limit")
        if isinstance(limit, exp.Limit):
            # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
            # we replace here because otherwise TOP would be generated in select_sql
            limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

    return super().select_sql(expression)
def queryoption_sql(self, expression: exp.QueryOption) -> str:
    """Render one entry of an OPTION(...) hint, adding '=' for options that need it."""
    name = self.sql(expression, "this")
    setting = self.sql(expression, "expression")
    if not setting:
        return name
    eq = "= " if name in OPTIONS_THAT_REQUIRE_EQUAL else ""
    return f"{name} {eq}{setting}"
def lateral_op(self, expression: exp.Lateral) -> str:
    """Return the join operator for a lateral expression (CROSS/OUTER APPLY)."""
    flag = expression.args.get("cross_apply")
    # Identity checks on purpose: only an explicit True/False selects an APPLY form.
    if flag is True:
        return "CROSS APPLY"
    if flag is False:
        return "OUTER APPLY"

    # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
    self.unsupported("LATERAL clause is not supported.")
    return "LATERAL"
def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
    """Transpile SPLIT_PART to PARSENAME when the delimiter is '.' and all args are literals."""
    this = expression.this
    split_count = len(this.name.split("."))
    delimiter = expression.args.get("delimiter")
    part_index = expression.args.get("part_index")

    # PARSENAME only supports up to 4 dot-separated parts and constant arguments.
    if (
        not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
        or (delimiter and delimiter.name != ".")
        or not part_index
        or split_count > 4
    ):
        self.unsupported(
            "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
        )
        return ""

    # PARSENAME counts parts right-to-left, hence the index inversion.
    return self.func(
        "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
    )
def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
    """Render TIMEFROMPARTS, dropping unsupported nanoseconds and defaulting fractions/precision to 0."""
    nano = expression.args.get("nano")
    if nano is not None:
        # Remove the arg before generating so it does not leak into the output.
        nano.pop()
        self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

    # TIMEFROMPARTS requires all positional arguments, so fill missing ones with 0.
    if expression.args.get("fractions") is None:
        expression.set("fractions", exp.Literal.number(0))
    if expression.args.get("precision") is None:
        expression.set("precision", exp.Literal.number(0))

    return rename_func("TIMEFROMPARTS")(self, expression)
def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
    """Render DATETIMEFROMPARTS, dropping unsupported zone/nano args and defaulting milli to 0."""
    zone = expression.args.get("zone")
    if zone is not None:
        # Remove the arg before generating so it does not leak into the output.
        zone.pop()
        self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

    nano = expression.args.get("nano")
    if nano is not None:
        nano.pop()
        self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

    # DATETIMEFROMPARTS requires the milliseconds argument, so default it to 0.
    if expression.args.get("milli") is None:
        expression.set("milli", exp.Literal.number(0))

    return rename_func("DATETIMEFROMPARTS")(self, expression)
def setitem_sql(self, expression: exp.SetItem) -> str:
    """Render a SET item; T-SQL omits '=' unless the LHS is a variable."""
    item = expression.this
    plain_eq = isinstance(item, exp.EQ) and not isinstance(item.left, exp.Parameter)
    if not plain_eq:
        return super().setitem_sql(expression)

    # T-SQL does not use '=' in SET command, except when the LHS is a variable.
    left = self.sql(item.left)
    right = self.sql(item.right)
    return f"{left} {right}"
def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    """Prefix the created object's name with '#' when it carries a TemporaryProperty."""
    sql = self.sql(expression, "this")
    properties = expression.args.get("properties")

    # Only prefix if not already a temp name (starting with '#').
    if sql[:1] != "#" and any(
        isinstance(prop, exp.TemporaryProperty)
        for prop in (properties.expressions if properties else [])
    ):
        # Insert the '#' inside the brackets when the name is quoted as [name].
        sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

    return sql
def create_sql(self, expression: exp.Create) -> str:
    """Generate CREATE, rewriting CTAS to SELECT ... INTO and IF NOT EXISTS to guarded EXEC."""
    kind = expression.kind
    exists = expression.args.pop("exists", None)

    like_property = expression.find(exp.LikeProperty)
    if like_property:
        ctas_expression = like_property.this
    else:
        ctas_expression = expression.expression

    if kind == "VIEW":
        expression.this.set("catalog", None)
        with_ = expression.args.get("with")
        if ctas_expression and with_:
            # We've already preprocessed the Create expression to bubble up any nested CTEs,
            # but CREATE VIEW actually requires the WITH clause to come after it so we need
            # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
            ctas_expression.set("with", with_.pop())

    sql = super().create_sql(expression)

    table = expression.find(exp.Table)

    # Convert CTAS statement to SELECT .. INTO ..
    if kind == "TABLE" and ctas_expression:
        if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
            ctas_expression = ctas_expression.subquery()

        select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
        select_into.set("into", exp.Into(this=table))

        if like_property:
            # CREATE TABLE ... LIKE copies only the schema, so select zero rows.
            select_into.limit(0, copy=False)

        sql = self.sql(select_into)

    if exists:
        # Emulate IF NOT EXISTS by wrapping the statement in a guarded dynamic EXEC.
        identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
        sql_with_ctes = self.prepend_ctes(expression, sql)
        sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
        if kind == "SCHEMA":
            return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})"""
        elif kind == "TABLE":
            assert table
            where = exp.and_(
                exp.column("table_name").eq(table.name),
                exp.column("table_schema").eq(table.db) if table.db else None,
                exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
            )
            return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})"""
        elif kind == "INDEX":
            index = self.sql(exp.Literal.string(expression.this.text("this")))
            return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
    elif expression.args.get("replace"):
        # T-SQL spells CREATE OR REPLACE as CREATE OR ALTER.
        sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

    return self.prepend_ctes(expression, sql)
def version_sql(self, expression: exp.Version) -> str:
    """Render a temporal clause, e.g. FOR SYSTEM_TIME AS OF / FROM ... TO / BETWEEN ... AND."""
    raw_name = expression.name
    clause = f"FOR {'SYSTEM_TIME' if raw_name == 'TIMESTAMP' else raw_name}"
    expr = expression.expression
    kind = expression.text("kind")

    if kind in ("FROM", "BETWEEN"):
        # These kinds take two endpoints joined by a kind-specific separator.
        bounds = expr.expressions
        joiner = "TO" if kind == "FROM" else "AND"
        rendered = f"{self.sql(seq_get(bounds, 0))} {joiner} {self.sql(seq_get(bounds, 1))}"
    else:
        rendered = self.sql(expr)

    suffix = f" {rendered}" if rendered else ""
    return f"{clause} {kind}{suffix}"
def commit_sql(self, expression: exp.Commit) -> str:
    """Render COMMIT TRANSACTION with an optional DELAYED_DURABILITY hint."""
    name = self.sql(expression, "this")
    suffix = f" {name}" if name else ""

    durability = expression.args.get("durability")
    if durability is None:
        hint = ""
    else:
        state = "ON" if durability else "OFF"
        hint = f" WITH (DELAYED_DURABILITY = {state})"

    return f"COMMIT TRANSACTION{suffix}{hint}"
def identifier_sql(self, expression: exp.Identifier) -> str:
    """Render an identifier, adding ##/# prefixes for global/local temporary objects."""
    rendered = super().identifier_sql(expression)

    if expression.args.get("global"):
        # Global temporary objects use a '##' prefix.
        return f"##{rendered}"
    if expression.args.get("temporary"):
        # Local temporary objects use a '#' prefix.
        return f"#{rendered}"

    return rendered
def declareitem_sql(self, expression: exp.DeclareItem) -> str:
    """Render one DECLARE entry: <var> AS <type>[ = <default>]."""
    var_sql = self.sql(expression, "this")
    default_sql = self.sql(expression, "default")
    default_suffix = f" = {default_sql}" if default_sql else ""

    kind_sql = self.sql(expression, "kind")
    if isinstance(expression.args.get("kind"), exp.Schema):
        # A Schema kind denotes a table variable: DECLARE @t AS TABLE (...).
        kind_sql = f"TABLE {kind_sql}"

    return f"{var_sql} AS {kind_sql}{default_suffix}"
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- SUPPORTS_MEDIAN
- SUPPORTS_UNIX_SECONDS
- ARRAY_SIZE_NAME
- ARRAY_SIZE_DIM_REQUIRED
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- transformcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- groupingsets_sql
- rollup_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- try_sql
- log_sql
- use_sql
- binary
- ceil_floor
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- whens_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- uniquekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodatetime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- arrayconcat_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonexists_sql
- arrayagg_sql
- apply_sql
- grant_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql
- string_sql
- median_sql
- overflowtruncatebehavior_sql
- unixseconds_sql
- arraysize_sql
- attach_sql
- detach_sql
- attachoption_sql
- featuresattime_sql
- watermarkcolumnconstraint_sql
- encodeproperty_sql
- includeproperty_sql
- xmlelement_sql
- partitionbyrangeproperty_sql
- partitionbyrangepropertydynamic_sql
- unpivotcolumns_sql
- analyzesample_sql
- analyzestatistics_sql
- analyzehistogram_sql
- analyzedelete_sql
- analyzelistchainedrows_sql
- analyzevalidate_sql
- analyze_sql
- xmltable_sql