sqlglot.dialects.tsql
1from __future__ import annotations 2 3import datetime 4import re 5import typing as t 6from functools import partial, reduce 7 8from sqlglot import exp, generator, parser, tokens, transforms 9from sqlglot.dialects.dialect import ( 10 Dialect, 11 NormalizationStrategy, 12 any_value_to_max_sql, 13 date_delta_sql, 14 datestrtodate_sql, 15 generatedasidentitycolumnconstraint_sql, 16 max_or_greatest, 17 min_or_least, 18 build_date_delta, 19 rename_func, 20 trim_sql, 21 timestrtotime_sql, 22) 23from sqlglot.helper import seq_get 24from sqlglot.parser import build_coalesce 25from sqlglot.time import format_time 26from sqlglot.tokens import TokenType 27 28if t.TYPE_CHECKING: 29 from sqlglot._typing import E 30 31FULL_FORMAT_TIME_MAPPING = { 32 "weekday": "%A", 33 "dw": "%A", 34 "w": "%A", 35 "month": "%B", 36 "mm": "%B", 37 "m": "%B", 38} 39 40DATE_DELTA_INTERVAL = { 41 "year": "year", 42 "yyyy": "year", 43 "yy": "year", 44 "quarter": "quarter", 45 "qq": "quarter", 46 "q": "quarter", 47 "month": "month", 48 "mm": "month", 49 "m": "month", 50 "week": "week", 51 "ww": "week", 52 "wk": "week", 53 "day": "day", 54 "dd": "day", 55 "d": "day", 56} 57 58 59DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})") 60 61# N = Numeric, C=Currency 62TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"} 63 64DEFAULT_START_DATE = datetime.date(1900, 1, 1) 65 66BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias} 67 68# Unsupported options: 69# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] ) 70# - TABLE HINT 71OPTIONS: parser.OPTIONS_TYPE = { 72 **dict.fromkeys( 73 ( 74 "DISABLE_OPTIMIZED_PLAN_FORCING", 75 "FAST", 76 "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX", 77 "LABEL", 78 "MAXDOP", 79 "MAXRECURSION", 80 "MAX_GRANT_PERCENT", 81 "MIN_GRANT_PERCENT", 82 "NO_PERFORMANCE_SPOOL", 83 "QUERYTRACEON", 84 "RECOMPILE", 85 ), 86 tuple(), 87 ), 88 "CONCAT": ("UNION",), 89 "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"), 90 "EXPAND": ("VIEWS",), 91 "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"), 92 "HASH": ("GROUP", "JOIN", "UNION"), 93 "KEEP": ("PLAN",), 94 "KEEPFIXED": ("PLAN",), 95 "LOOP": ("JOIN",), 96 "MERGE": ("JOIN", "UNION"), 97 "OPTIMIZE": (("FOR", "UNKNOWN"),), 98 "ORDER": ("GROUP",), 99 "PARAMETERIZATION": ("FORCED", "SIMPLE"), 100 "ROBUST": ("PLAN",), 101 "USE": ("PLAN",), 102} 103 104OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL") 105 106 107def _build_formatted_time( 108 exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None 109) -> t.Callable[[t.List], E]: 110 def _builder(args: t.List) -> E: 111 assert len(args) == 2 112 113 return exp_class( 114 this=exp.cast(args[1], exp.DataType.Type.DATETIME), 115 format=exp.Literal.string( 116 format_time( 117 args[0].name.lower(), 118 ( 119 {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING} 120 if full_format_mapping 121 else TSQL.TIME_MAPPING 122 ), 123 ) 124 ), 125 ) 126 127 return _builder 128 129 130def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr: 131 this = seq_get(args, 0) 132 fmt = seq_get(args, 1) 133 culture = seq_get(args, 2) 134 135 number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name)) 136 137 if number_fmt: 138 return exp.NumberToStr(this=this, format=fmt, culture=culture) 139 140 if fmt: 141 fmt = exp.Literal.string( 142 format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING) 143 if len(fmt.name) == 1 144 else format_time(fmt.name, TSQL.TIME_MAPPING) 145 ) 146 147 return 
exp.TimeToStr(this=this, format=fmt, culture=culture) 148 149 150def _build_eomonth(args: t.List) -> exp.LastDay: 151 date = exp.TsOrDsToDate(this=seq_get(args, 0)) 152 month_lag = seq_get(args, 1) 153 154 if month_lag is None: 155 this: exp.Expression = date 156 else: 157 unit = DATE_DELTA_INTERVAL.get("month") 158 this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit)) 159 160 return exp.LastDay(this=this) 161 162 163def _build_hashbytes(args: t.List) -> exp.Expression: 164 kind, data = args 165 kind = kind.name.upper() if kind.is_string else "" 166 167 if kind == "MD5": 168 args.pop(0) 169 return exp.MD5(this=data) 170 if kind in ("SHA", "SHA1"): 171 args.pop(0) 172 return exp.SHA(this=data) 173 if kind == "SHA2_256": 174 return exp.SHA2(this=data, length=exp.Literal.number(256)) 175 if kind == "SHA2_512": 176 return exp.SHA2(this=data, length=exp.Literal.number(512)) 177 178 return exp.func("HASHBYTES", *args) 179 180 181DATEPART_ONLY_FORMATS = {"DW", "WK", "HOUR", "QUARTER"} 182 183 184def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str: 185 fmt = expression.args["format"] 186 187 if not isinstance(expression, exp.NumberToStr): 188 if fmt.is_string: 189 mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING) 190 191 name = (mapped_fmt or "").upper() 192 if name in DATEPART_ONLY_FORMATS: 193 return self.func("DATEPART", name, expression.this) 194 195 fmt_sql = self.sql(exp.Literal.string(mapped_fmt)) 196 else: 197 fmt_sql = self.format_time(expression) or self.sql(fmt) 198 else: 199 fmt_sql = self.sql(fmt) 200 201 return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture")) 202 203 204def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str: 205 this = expression.this 206 distinct = expression.find(exp.Distinct) 207 if distinct: 208 # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression 209 self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.") 210 this = distinct.pop().expressions[0] 211 212 order = "" 213 if isinstance(expression.this, exp.Order): 214 if expression.this.this: 215 this = expression.this.this.pop() 216 # Order has a leading space 217 order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})" 218 219 separator = expression.args.get("separator") or exp.Literal.string(",") 220 return f"STRING_AGG({self.format_args(this, separator)}){order}" 221 222 223def _build_date_delta( 224 exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None 225) -> t.Callable[[t.List], E]: 226 def _builder(args: t.List) -> E: 227 unit = seq_get(args, 0) 228 if unit and unit_mapping: 229 unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name)) 230 231 start_date = seq_get(args, 1) 232 if start_date and start_date.is_number: 233 # Numeric types are valid DATETIME values 234 if start_date.is_int: 235 adds = DEFAULT_START_DATE + datetime.timedelta(days=int(start_date.this)) 236 start_date = exp.Literal.string(adds.strftime("%F")) 237 else: 238 # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs. 239 # This is not a problem when generating T-SQL code, it is when transpiling to other dialects. 
240 return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit) 241 242 return exp_class( 243 this=exp.TimeStrToTime(this=seq_get(args, 2)), 244 expression=exp.TimeStrToTime(this=start_date), 245 unit=unit, 246 ) 247 248 return _builder 249 250 251def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression: 252 """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.""" 253 alias = expression.args.get("alias") 254 255 if ( 256 isinstance(expression, (exp.CTE, exp.Subquery)) 257 and isinstance(alias, exp.TableAlias) 258 and not alias.columns 259 ): 260 from sqlglot.optimizer.qualify_columns import qualify_outputs 261 262 # We keep track of the unaliased column projection indexes instead of the expressions 263 # themselves, because the latter are going to be replaced by new nodes when the aliases 264 # are added and hence we won't be able to reach these newly added Alias parents 265 query = expression.this 266 unaliased_column_indexes = ( 267 i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias 268 ) 269 270 qualify_outputs(query) 271 272 # Preserve the quoting information of columns for newly added Alias nodes 273 query_selects = query.selects 274 for select_index in unaliased_column_indexes: 275 alias = query_selects[select_index] 276 column = alias.this 277 if isinstance(column.this, exp.Identifier): 278 alias.args["alias"].set("quoted", column.this.quoted) 279 280 return expression 281 282 283# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax 284def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts: 285 return exp.TimestampFromParts( 286 year=seq_get(args, 0), 287 month=seq_get(args, 1), 288 day=seq_get(args, 2), 289 hour=seq_get(args, 3), 290 min=seq_get(args, 4), 291 sec=seq_get(args, 5), 292 milli=seq_get(args, 6), 293 ) 294 295 296# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax 297def _build_timefromparts(args: t.List) -> exp.TimeFromParts: 298 return exp.TimeFromParts( 299 hour=seq_get(args, 0), 300 min=seq_get(args, 1), 301 sec=seq_get(args, 2), 302 fractions=seq_get(args, 3), 303 precision=seq_get(args, 4), 304 ) 305 306 307def _build_with_arg_as_text( 308 klass: t.Type[exp.Expression], 309) -> t.Callable[[t.List[exp.Expression]], exp.Expression]: 310 def _parse(args: t.List[exp.Expression]) -> exp.Expression: 311 this = seq_get(args, 0) 312 313 if this and not this.is_string: 314 this = exp.cast(this, exp.DataType.Type.TEXT) 315 316 expression = seq_get(args, 1) 317 kwargs = {"this": this} 318 319 if expression: 320 kwargs["expression"] = expression 321 322 return klass(**kwargs) 323 324 return _parse 325 326 327# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16 328def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous: 329 # PARSENAME(...) 
will be stored into exp.SplitPart if: 330 # - All args are literals 331 # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL) 332 if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args): 333 this = args[0] 334 part_index = args[1] 335 split_count = len(this.name.split(".")) 336 if split_count <= 4: 337 return exp.SplitPart( 338 this=this, 339 delimiter=exp.Literal.string("."), 340 part_index=exp.Literal.number(split_count + 1 - part_index.to_py()), 341 ) 342 343 return exp.Anonymous(this="PARSENAME", expressions=args) 344 345 346def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract: 347 if len(args) == 1: 348 # The default value for path is '$'. As a result, if you don't provide a 349 # value for path, JSON_QUERY returns the input expression. 350 args.append(exp.Literal.string("$")) 351 352 return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect) 353 354 355def _json_extract_sql( 356 self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar 357) -> str: 358 json_query = self.func("JSON_QUERY", expression.this, expression.expression) 359 json_value = self.func("JSON_VALUE", expression.this, expression.expression) 360 return self.func("ISNULL", json_query, json_value) 361 362 363def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime): 364 sql = timestrtotime_sql(self, expression) 365 if expression.args.get("zone"): 366 # If there is a timezone, produce an expression like: 367 # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC' 368 # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information 369 return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC"))) 370 return sql 371 372 373class TSQL(Dialect): 374 SUPPORTS_SEMI_ANTI_JOIN = False 375 LOG_BASE_FIRST = False 376 TYPED_DIVISION = True 377 CONCAT_COALESCE = True 378 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 379 380 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 381 382 TIME_MAPPING = { 383 "year": "%Y", 384 "dayofyear": "%j", 385 "day": "%d", 386 "dy": "%d", 387 "y": "%Y", 388 "week": "%W", 389 "ww": "%W", 390 "wk": "%W", 391 "hour": "%h", 392 "hh": "%I", 393 "minute": "%M", 394 "mi": "%M", 395 "n": "%M", 396 "second": "%S", 397 "ss": "%S", 398 "s": "%-S", 399 "millisecond": "%f", 400 "ms": "%f", 401 "weekday": "%w", 402 "dw": "%w", 403 "month": "%m", 404 "mm": "%M", 405 "m": "%-M", 406 "Y": "%Y", 407 "YYYY": "%Y", 408 "YY": "%y", 409 "MMMM": "%B", 410 "MMM": "%b", 411 "MM": "%m", 412 "M": "%-m", 413 "dddd": "%A", 414 "dd": "%d", 415 "d": "%-d", 416 "HH": "%H", 417 "H": "%-H", 418 "h": "%-I", 419 "ffffff": "%f", 420 "yyyy": "%Y", 421 "yy": "%y", 422 } 423 424 CONVERT_FORMAT_MAPPING = { 425 "0": "%b %d %Y %-I:%M%p", 426 "1": "%m/%d/%y", 427 "2": "%y.%m.%d", 428 "3": "%d/%m/%y", 429 "4": "%d.%m.%y", 430 "5": "%d-%m-%y", 431 "6": "%d %b %y", 432 "7": "%b %d, %y", 433 "8": "%H:%M:%S", 434 "9": "%b %d %Y %-I:%M:%S:%f%p", 435 "10": "mm-dd-yy", 436 "11": "yy/mm/dd", 437 "12": "yymmdd", 438 "13": "%d %b %Y %H:%M:ss:%f", 439 "14": "%H:%M:%S:%f", 440 "20": "%Y-%m-%d %H:%M:%S", 441 "21": "%Y-%m-%d %H:%M:%S.%f", 442 "22": "%m/%d/%y %-I:%M:%S %p", 443 "23": "%Y-%m-%d", 444 "24": "%H:%M:%S", 445 "25": "%Y-%m-%d %H:%M:%S.%f", 446 "100": "%b %d %Y %-I:%M%p", 447 "101": "%m/%d/%Y", 448 "102": "%Y.%m.%d", 449 "103": "%d/%m/%Y", 450 "104": "%d.%m.%Y", 451 "105": "%d-%m-%Y", 452 "106": "%d %b %Y", 453 "107": "%b %d, %Y", 454 
"108": "%H:%M:%S", 455 "109": "%b %d %Y %-I:%M:%S:%f%p", 456 "110": "%m-%d-%Y", 457 "111": "%Y/%m/%d", 458 "112": "%Y%m%d", 459 "113": "%d %b %Y %H:%M:%S:%f", 460 "114": "%H:%M:%S:%f", 461 "120": "%Y-%m-%d %H:%M:%S", 462 "121": "%Y-%m-%d %H:%M:%S.%f", 463 } 464 465 FORMAT_TIME_MAPPING = { 466 "y": "%B %Y", 467 "d": "%m/%d/%Y", 468 "H": "%-H", 469 "h": "%-I", 470 "s": "%Y-%m-%d %H:%M:%S", 471 "D": "%A,%B,%Y", 472 "f": "%A,%B,%Y %-I:%M %p", 473 "F": "%A,%B,%Y %-I:%M:%S %p", 474 "g": "%m/%d/%Y %-I:%M %p", 475 "G": "%m/%d/%Y %-I:%M:%S %p", 476 "M": "%B %-d", 477 "m": "%B %-d", 478 "O": "%Y-%m-%dT%H:%M:%S", 479 "u": "%Y-%M-%D %H:%M:%S%z", 480 "U": "%A, %B %D, %Y %H:%M:%S%z", 481 "T": "%-I:%M:%S %p", 482 "t": "%-I:%M", 483 "Y": "%a %Y", 484 } 485 486 class Tokenizer(tokens.Tokenizer): 487 IDENTIFIERS = [("[", "]"), '"'] 488 QUOTES = ["'", '"'] 489 HEX_STRINGS = [("0x", ""), ("0X", "")] 490 VAR_SINGLE_TOKENS = {"@", "$", "#"} 491 492 KEYWORDS = { 493 **tokens.Tokenizer.KEYWORDS, 494 "CLUSTERED INDEX": TokenType.INDEX, 495 "DATETIME2": TokenType.DATETIME, 496 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 497 "DECLARE": TokenType.DECLARE, 498 "EXEC": TokenType.COMMAND, 499 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 500 "IMAGE": TokenType.IMAGE, 501 "MONEY": TokenType.MONEY, 502 "NONCLUSTERED INDEX": TokenType.INDEX, 503 "NTEXT": TokenType.TEXT, 504 "OPTION": TokenType.OPTION, 505 "OUTPUT": TokenType.RETURNING, 506 "PRINT": TokenType.COMMAND, 507 "PROC": TokenType.PROCEDURE, 508 "REAL": TokenType.FLOAT, 509 "ROWVERSION": TokenType.ROWVERSION, 510 "SMALLDATETIME": TokenType.DATETIME, 511 "SMALLMONEY": TokenType.SMALLMONEY, 512 "SQL_VARIANT": TokenType.VARIANT, 513 "SYSTEM_USER": TokenType.CURRENT_USER, 514 "TOP": TokenType.TOP, 515 "TIMESTAMP": TokenType.ROWVERSION, 516 "TINYINT": TokenType.UTINYINT, 517 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 518 "UPDATE STATISTICS": TokenType.COMMAND, 519 "XML": TokenType.XML, 520 } 521 KEYWORDS.pop("/*+") 522 523 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 524 525 class Parser(parser.Parser): 526 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 527 LOG_DEFAULTS_TO_LN = True 528 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 529 STRING_ALIASES = True 530 NO_PAREN_IF_COMMANDS = False 531 532 QUERY_MODIFIER_PARSERS = { 533 **parser.Parser.QUERY_MODIFIER_PARSERS, 534 TokenType.OPTION: lambda self: ("options", self._parse_options()), 535 } 536 537 FUNCTIONS = { 538 **parser.Parser.FUNCTIONS, 539 "CHARINDEX": lambda args: exp.StrPosition( 540 this=seq_get(args, 1), 541 substr=seq_get(args, 0), 542 position=seq_get(args, 2), 543 ), 544 "COUNT": lambda args: exp.Count( 545 this=seq_get(args, 0), expressions=args[1:], big_int=False 546 ), 547 "COUNT_BIG": lambda args: exp.Count( 548 this=seq_get(args, 0), expressions=args[1:], big_int=True 549 ), 550 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 551 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 552 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 553 "DATEPART": _build_formatted_time(exp.TimeToStr), 554 "DATETIMEFROMPARTS": _build_datetimefromparts, 555 "EOMONTH": _build_eomonth, 556 "FORMAT": _build_format, 557 "GETDATE": exp.CurrentTimestamp.from_arg_list, 558 "HASHBYTES": _build_hashbytes, 559 "ISNULL": build_coalesce, 560 "JSON_QUERY": _build_json_query, 561 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 562 "LEN": _build_with_arg_as_text(exp.Length), 563 "LEFT": 
_build_with_arg_as_text(exp.Left), 564 "RIGHT": _build_with_arg_as_text(exp.Right), 565 "PARSENAME": _build_parsename, 566 "REPLICATE": exp.Repeat.from_arg_list, 567 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 568 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 569 "SUSER_NAME": exp.CurrentUser.from_arg_list, 570 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 571 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 572 "TIMEFROMPARTS": _build_timefromparts, 573 } 574 575 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 576 577 PROCEDURE_OPTIONS = dict.fromkeys( 578 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 579 ) 580 581 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 582 TokenType.TABLE, 583 *parser.Parser.TYPE_TOKENS, 584 } 585 586 STATEMENT_PARSERS = { 587 **parser.Parser.STATEMENT_PARSERS, 588 TokenType.DECLARE: lambda self: self._parse_declare(), 589 } 590 591 RANGE_PARSERS = { 592 **parser.Parser.RANGE_PARSERS, 593 TokenType.DCOLON: lambda self, this: self.expression( 594 exp.ScopeResolution, 595 this=this, 596 expression=self._parse_function() or self._parse_var(any_token=True), 597 ), 598 } 599 600 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 601 COLUMN_OPERATORS = { 602 **parser.Parser.COLUMN_OPERATORS, 603 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 604 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 605 else self.expression(exp.ScopeResolution, this=this, expression=to), 606 } 607 608 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 609 # We want to use _parse_types() if the first token after :: is a known type, 610 # otherwise we could parse something like x::varchar(max) into a function 611 if self._match_set(self.TYPE_TOKENS, advance=False): 612 return self._parse_types() 613 614 return self._parse_function() or self._parse_types() 615 616 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 617 if not self._match(TokenType.OPTION): 618 return None 619 620 def _parse_option() -> t.Optional[exp.Expression]: 621 option = self._parse_var_from_options(OPTIONS) 622 if not option: 623 return None 624 625 self._match(TokenType.EQ) 626 return self.expression( 627 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 628 ) 629 630 return self._parse_wrapped_csv(_parse_option) 631 632 def _parse_projections(self) -> t.List[exp.Expression]: 633 """ 634 T-SQL supports the syntax alias = expression in the SELECT's projection list, 635 so we transform all parsed Selects to convert their EQ projections into Aliases. 
636 637 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 638 """ 639 return [ 640 ( 641 exp.alias_(projection.expression, projection.this.this, copy=False) 642 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 643 else projection 644 ) 645 for projection in super()._parse_projections() 646 ] 647 648 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 649 """Applies to SQL Server and Azure SQL Database 650 COMMIT [ { TRAN | TRANSACTION } 651 [ transaction_name | @tran_name_variable ] ] 652 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 653 654 ROLLBACK { TRAN | TRANSACTION } 655 [ transaction_name | @tran_name_variable 656 | savepoint_name | @savepoint_variable ] 657 """ 658 rollback = self._prev.token_type == TokenType.ROLLBACK 659 660 self._match_texts(("TRAN", "TRANSACTION")) 661 this = self._parse_id_var() 662 663 if rollback: 664 return self.expression(exp.Rollback, this=this) 665 666 durability = None 667 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 668 self._match_text_seq("DELAYED_DURABILITY") 669 self._match(TokenType.EQ) 670 671 if self._match_text_seq("OFF"): 672 durability = False 673 else: 674 self._match(TokenType.ON) 675 durability = True 676 677 self._match_r_paren() 678 679 return self.expression(exp.Commit, this=this, durability=durability) 680 681 def _parse_transaction(self) -> exp.Transaction | exp.Command: 682 """Applies to SQL Server and Azure SQL Database 683 BEGIN { TRAN | TRANSACTION } 684 [ { transaction_name | @tran_name_variable } 685 [ WITH MARK [ 'description' ] ] 686 ] 687 """ 688 if self._match_texts(("TRAN", "TRANSACTION")): 689 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 690 if self._match_text_seq("WITH", "MARK"): 691 transaction.set("mark", self._parse_string()) 692 693 return transaction 694 695 return self._parse_as_command(self._prev) 696 697 def _parse_returns(self) -> exp.ReturnsProperty: 698 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 699 returns = super()._parse_returns() 700 returns.set("table", table) 701 return returns 702 703 def _parse_convert( 704 self, strict: bool, safe: t.Optional[bool] = None 705 ) -> t.Optional[exp.Expression]: 706 this = self._parse_types() 707 self._match(TokenType.COMMA) 708 args = [this, *self._parse_csv(self._parse_assignment)] 709 convert = exp.Convert.from_arg_list(args) 710 convert.set("safe", safe) 711 convert.set("strict", strict) 712 return convert 713 714 def _parse_user_defined_function( 715 self, kind: t.Optional[TokenType] = None 716 ) -> t.Optional[exp.Expression]: 717 this = super()._parse_user_defined_function(kind=kind) 718 719 if ( 720 kind == TokenType.FUNCTION 721 or isinstance(this, exp.UserDefinedFunction) 722 or self._match(TokenType.ALIAS, advance=False) 723 ): 724 return this 725 726 if not self._match(TokenType.WITH, advance=False): 727 expressions = self._parse_csv(self._parse_function_parameter) 728 else: 729 expressions = None 730 731 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 732 733 def _parse_id_var( 734 self, 735 any_token: bool = True, 736 tokens: t.Optional[t.Collection[TokenType]] = None, 737 ) -> t.Optional[exp.Expression]: 738 is_temporary = self._match(TokenType.HASH) 739 is_global = is_temporary and self._match(TokenType.HASH) 740 741 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 742 if this: 743 if is_global: 744 this.set("global", True) 745 
elif is_temporary: 746 this.set("temporary", True) 747 748 return this 749 750 def _parse_create(self) -> exp.Create | exp.Command: 751 create = super()._parse_create() 752 753 if isinstance(create, exp.Create): 754 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 755 if isinstance(table, exp.Table) and table.this.args.get("temporary"): 756 if not create.args.get("properties"): 757 create.set("properties", exp.Properties(expressions=[])) 758 759 create.args["properties"].append("expressions", exp.TemporaryProperty()) 760 761 return create 762 763 def _parse_if(self) -> t.Optional[exp.Expression]: 764 index = self._index 765 766 if self._match_text_seq("OBJECT_ID"): 767 self._parse_wrapped_csv(self._parse_string) 768 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 769 return self._parse_drop(exists=True) 770 self._retreat(index) 771 772 return super()._parse_if() 773 774 def _parse_unique(self) -> exp.UniqueColumnConstraint: 775 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 776 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 777 else: 778 this = self._parse_schema(self._parse_id_var(any_token=False)) 779 780 return self.expression(exp.UniqueColumnConstraint, this=this) 781 782 def _parse_partition(self) -> t.Optional[exp.Partition]: 783 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 784 return None 785 786 def parse_range(): 787 low = self._parse_bitwise() 788 high = self._parse_bitwise() if self._match_text_seq("TO") else None 789 790 return ( 791 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 792 ) 793 794 partition = self.expression( 795 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 796 ) 797 798 self._match_r_paren() 799 800 return partition 801 802 def _parse_declare(self) -> exp.Declare | exp.Command: 803 index = self._index 804 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 805 806 if not expressions or self._curr: 807 self._retreat(index) 808 return self._parse_as_command(self._prev) 809 810 return self.expression(exp.Declare, expressions=expressions) 811 812 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 813 var = self._parse_id_var() 814 if not var: 815 return None 816 817 value = None 818 self._match(TokenType.ALIAS) 819 if self._match(TokenType.TABLE): 820 data_type = self._parse_schema() 821 else: 822 data_type = self._parse_types() 823 if self._match(TokenType.EQ): 824 value = self._parse_bitwise() 825 826 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 827 828 class Generator(generator.Generator): 829 LIMIT_IS_TOP = True 830 QUERY_HINTS = False 831 RETURNING_END = False 832 NVL2_SUPPORTED = False 833 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 834 LIMIT_FETCH = "FETCH" 835 COMPUTED_COLUMN_WITH_TYPE = False 836 CTE_RECURSIVE_KEYWORD_REQUIRED = False 837 ENSURE_BOOLS = True 838 NULL_ORDERING_SUPPORTED = None 839 SUPPORTS_SINGLE_ARG_CONCAT = False 840 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 841 SUPPORTS_SELECT_INTO = True 842 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 843 SUPPORTS_TO_NUMBER = False 844 SET_OP_MODIFIERS = False 845 COPY_PARAMS_EQ_REQUIRED = True 846 PARSE_JSON_NAME = None 847 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 848 849 EXPRESSIONS_WITHOUT_NESTED_CTES = { 850 exp.Create, 851 exp.Delete, 852 exp.Insert, 853 exp.Intersect, 854 exp.Except, 855 exp.Merge, 856 exp.Select, 857 exp.Subquery, 858 exp.Union, 859 exp.Update, 860 } 861 862 
SUPPORTED_JSON_PATH_PARTS = { 863 exp.JSONPathKey, 864 exp.JSONPathRoot, 865 exp.JSONPathSubscript, 866 } 867 868 TYPE_MAPPING = { 869 **generator.Generator.TYPE_MAPPING, 870 exp.DataType.Type.BOOLEAN: "BIT", 871 exp.DataType.Type.DECIMAL: "NUMERIC", 872 exp.DataType.Type.DATETIME: "DATETIME2", 873 exp.DataType.Type.DOUBLE: "FLOAT", 874 exp.DataType.Type.INT: "INTEGER", 875 exp.DataType.Type.ROWVERSION: "ROWVERSION", 876 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 877 exp.DataType.Type.TIMESTAMP: "DATETIME2", 878 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 879 exp.DataType.Type.UTINYINT: "TINYINT", 880 exp.DataType.Type.VARIANT: "SQL_VARIANT", 881 } 882 883 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 884 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 885 886 TRANSFORMS = { 887 **generator.Generator.TRANSFORMS, 888 exp.AnyValue: any_value_to_max_sql, 889 exp.ArrayToString: rename_func("STRING_AGG"), 890 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 891 exp.DateAdd: date_delta_sql("DATEADD"), 892 exp.DateDiff: date_delta_sql("DATEDIFF"), 893 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 894 exp.CurrentDate: rename_func("GETDATE"), 895 exp.CurrentTimestamp: rename_func("GETDATE"), 896 exp.DateStrToDate: datestrtodate_sql, 897 exp.Extract: rename_func("DATEPART"), 898 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 899 exp.GroupConcat: _string_agg_sql, 900 exp.If: rename_func("IIF"), 901 exp.JSONExtract: _json_extract_sql, 902 exp.JSONExtractScalar: _json_extract_sql, 903 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 904 exp.Ln: rename_func("LOG"), 905 exp.Max: max_or_greatest, 906 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 907 exp.Min: min_or_least, 908 exp.NumberToStr: _format_sql, 909 exp.Repeat: rename_func("REPLICATE"), 910 exp.Select: transforms.preprocess( 911 [ 912 transforms.eliminate_distinct_on, 913 transforms.eliminate_semi_and_anti_joins, 914 transforms.eliminate_qualify, 915 transforms.unnest_generate_date_array_using_recursive_cte, 916 ] 917 ), 918 exp.Stddev: rename_func("STDEV"), 919 exp.StrPosition: lambda self, e: self.func( 920 "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position") 921 ), 922 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 923 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 924 exp.SHA2: lambda self, e: self.func( 925 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 926 ), 927 exp.TemporaryProperty: lambda self, e: "", 928 exp.TimeStrToTime: _timestrtotime_sql, 929 exp.TimeToStr: _format_sql, 930 exp.Trim: trim_sql, 931 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 932 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 933 } 934 935 TRANSFORMS.pop(exp.ReturnsProperty) 936 937 PROPERTIES_LOCATION = { 938 **generator.Generator.PROPERTIES_LOCATION, 939 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 940 } 941 942 def scope_resolution(self, rhs: str, scope_name: str) -> str: 943 return f"{scope_name}::{rhs}" 944 945 def select_sql(self, expression: exp.Select) -> str: 946 if expression.args.get("offset"): 947 if not expression.args.get("order"): 948 # ORDER BY is required in order to use OFFSET in a query, so we use 949 # a noop order by, since we don't really care about the order. 
950 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 951 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 952 953 limit = expression.args.get("limit") 954 if isinstance(limit, exp.Limit): 955 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 956 # we replace here because otherwise TOP would be generated in select_sql 957 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 958 959 return super().select_sql(expression) 960 961 def convert_sql(self, expression: exp.Convert) -> str: 962 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 963 return self.func( 964 name, expression.this, expression.expression, expression.args.get("style") 965 ) 966 967 def queryoption_sql(self, expression: exp.QueryOption) -> str: 968 option = self.sql(expression, "this") 969 value = self.sql(expression, "expression") 970 if value: 971 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 972 return f"{option} {optional_equal_sign}{value}" 973 return option 974 975 def lateral_op(self, expression: exp.Lateral) -> str: 976 cross_apply = expression.args.get("cross_apply") 977 if cross_apply is True: 978 return "CROSS APPLY" 979 if cross_apply is False: 980 return "OUTER APPLY" 981 982 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 983 self.unsupported("LATERAL clause is not supported.") 984 return "LATERAL" 985 986 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 987 this = expression.this 988 split_count = len(this.name.split(".")) 989 delimiter = expression.args.get("delimiter") 990 part_index = expression.args.get("part_index") 991 992 if ( 993 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 994 or (delimiter and delimiter.name != ".") 995 or not part_index 996 or split_count > 4 997 ): 998 self.unsupported( 999 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1000 ) 1001 return "" 1002 1003 return self.func( 1004 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1005 ) 1006 1007 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1008 nano = expression.args.get("nano") 1009 if nano is not None: 1010 nano.pop() 1011 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1012 1013 if expression.args.get("fractions") is None: 1014 expression.set("fractions", exp.Literal.number(0)) 1015 if expression.args.get("precision") is None: 1016 expression.set("precision", exp.Literal.number(0)) 1017 1018 return rename_func("TIMEFROMPARTS")(self, expression) 1019 1020 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1021 zone = expression.args.get("zone") 1022 if zone is not None: 1023 zone.pop() 1024 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1025 1026 nano = expression.args.get("nano") 1027 if nano is not None: 1028 nano.pop() 1029 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1030 1031 if expression.args.get("milli") is None: 1032 expression.set("milli", exp.Literal.number(0)) 1033 1034 return rename_func("DATETIMEFROMPARTS")(self, expression) 1035 1036 def setitem_sql(self, expression: exp.SetItem) -> str: 1037 this = expression.this 1038 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1039 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 
1040 return f"{self.sql(this.left)} {self.sql(this.right)}" 1041 1042 return super().setitem_sql(expression) 1043 1044 def boolean_sql(self, expression: exp.Boolean) -> str: 1045 if type(expression.parent) in BIT_TYPES or isinstance( 1046 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1047 ): 1048 return "1" if expression.this else "0" 1049 1050 return "(1 = 1)" if expression.this else "(1 = 0)" 1051 1052 def is_sql(self, expression: exp.Is) -> str: 1053 if isinstance(expression.expression, exp.Boolean): 1054 return self.binary(expression, "=") 1055 return self.binary(expression, "IS") 1056 1057 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1058 sql = self.sql(expression, "this") 1059 properties = expression.args.get("properties") 1060 1061 if sql[:1] != "#" and any( 1062 isinstance(prop, exp.TemporaryProperty) 1063 for prop in (properties.expressions if properties else []) 1064 ): 1065 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1066 1067 return sql 1068 1069 def create_sql(self, expression: exp.Create) -> str: 1070 kind = expression.kind 1071 exists = expression.args.pop("exists", None) 1072 1073 like_property = expression.find(exp.LikeProperty) 1074 if like_property: 1075 ctas_expression = like_property.this 1076 else: 1077 ctas_expression = expression.expression 1078 1079 if kind == "VIEW": 1080 expression.this.set("catalog", None) 1081 with_ = expression.args.get("with") 1082 if ctas_expression and with_: 1083 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1084 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1085 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1086 ctas_expression.set("with", with_.pop()) 1087 1088 sql = super().create_sql(expression) 1089 1090 table = expression.find(exp.Table) 1091 1092 # Convert CTAS statement to SELECT .. INTO .. 
1093 if kind == "TABLE" and ctas_expression: 1094 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1095 ctas_expression = ctas_expression.subquery() 1096 1097 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1098 select_into.set("into", exp.Into(this=table)) 1099 1100 if like_property: 1101 select_into.limit(0, copy=False) 1102 1103 sql = self.sql(select_into) 1104 1105 if exists: 1106 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1107 sql_with_ctes = self.prepend_ctes(expression, sql) 1108 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1109 if kind == "SCHEMA": 1110 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1111 elif kind == "TABLE": 1112 assert table 1113 where = exp.and_( 1114 exp.column("table_name").eq(table.name), 1115 exp.column("table_schema").eq(table.db) if table.db else None, 1116 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1117 ) 1118 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1119 elif kind == "INDEX": 1120 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1121 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1122 elif expression.args.get("replace"): 1123 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1124 1125 return self.prepend_ctes(expression, sql) 1126 1127 def count_sql(self, expression: exp.Count) -> str: 1128 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1129 return rename_func(func_name)(self, expression) 1130 1131 def offset_sql(self, expression: exp.Offset) -> str: 1132 return f"{super().offset_sql(expression)} ROWS" 1133 1134 def version_sql(self, expression: exp.Version) -> str: 1135 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1136 this = f"FOR {name}" 1137 expr = expression.expression 1138 kind = expression.text("kind") 1139 if kind in ("FROM", "BETWEEN"): 1140 args = expr.expressions 1141 sep = "TO" if kind == "FROM" else "AND" 1142 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1143 else: 1144 expr_sql = self.sql(expr) 1145 1146 expr_sql = f" {expr_sql}" if expr_sql else "" 1147 return f"{this} {kind}{expr_sql}" 1148 1149 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1150 table = expression.args.get("table") 1151 table = f"{table} " if table else "" 1152 return f"RETURNS {table}{self.sql(expression, 'this')}" 1153 1154 def returning_sql(self, expression: exp.Returning) -> str: 1155 into = self.sql(expression, "into") 1156 into = self.seg(f"INTO {into}") if into else "" 1157 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1158 1159 def transaction_sql(self, expression: exp.Transaction) -> str: 1160 this = self.sql(expression, "this") 1161 this = f" {this}" if this else "" 1162 mark = self.sql(expression, "mark") 1163 mark = f" WITH MARK {mark}" if mark else "" 1164 return f"BEGIN TRANSACTION{this}{mark}" 1165 1166 def commit_sql(self, expression: exp.Commit) -> str: 1167 this = self.sql(expression, "this") 1168 this = f" {this}" if this else "" 1169 durability = expression.args.get("durability") 1170 durability = ( 1171 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1172 if durability is not None 1173 else "" 1174 ) 1175 return 
f"COMMIT TRANSACTION{this}{durability}" 1176 1177 def rollback_sql(self, expression: exp.Rollback) -> str: 1178 this = self.sql(expression, "this") 1179 this = f" {this}" if this else "" 1180 return f"ROLLBACK TRANSACTION{this}" 1181 1182 def identifier_sql(self, expression: exp.Identifier) -> str: 1183 identifier = super().identifier_sql(expression) 1184 1185 if expression.args.get("global"): 1186 identifier = f"##{identifier}" 1187 elif expression.args.get("temporary"): 1188 identifier = f"#{identifier}" 1189 1190 return identifier 1191 1192 def constraint_sql(self, expression: exp.Constraint) -> str: 1193 this = self.sql(expression, "this") 1194 expressions = self.expressions(expression, flat=True, sep=" ") 1195 return f"CONSTRAINT {this} {expressions}" 1196 1197 def length_sql(self, expression: exp.Length) -> str: 1198 return self._uncast_text(expression, "LEN") 1199 1200 def right_sql(self, expression: exp.Right) -> str: 1201 return self._uncast_text(expression, "RIGHT") 1202 1203 def left_sql(self, expression: exp.Left) -> str: 1204 return self._uncast_text(expression, "LEFT") 1205 1206 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1207 this = expression.this 1208 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1209 this_sql = self.sql(this, "this") 1210 else: 1211 this_sql = self.sql(this) 1212 expression_sql = self.sql(expression, "expression") 1213 return self.func(name, this_sql, expression_sql if expression_sql else None) 1214 1215 def partition_sql(self, expression: exp.Partition) -> str: 1216 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1217 1218 def alter_sql(self, expression: exp.Alter) -> str: 1219 action = seq_get(expression.args.get("actions") or [], 0) 1220 if isinstance(action, exp.AlterRename): 1221 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1222 return super().alter_sql(expression) 1223 1224 def drop_sql(self, expression: exp.Drop) -> str: 1225 if expression.args["kind"] == "VIEW": 1226 expression.this.set("catalog", None) 1227 return super().drop_sql(expression) 1228 1229 def declare_sql(self, expression: exp.Declare) -> str: 1230 return f"DECLARE {self.expressions(expression, flat=True)}" 1231 1232 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1233 variable = self.sql(expression, "this") 1234 default = self.sql(expression, "default") 1235 default = f" = {default}" if default else "" 1236 1237 kind = self.sql(expression, "kind") 1238 if isinstance(expression.args.get("kind"), exp.Schema): 1239 kind = f"TABLE {kind}" 1240 1241 return f"{variable} AS {kind}{default}" 1242 1243 def options_modifier(self, expression: exp.Expression) -> str: 1244 options = self.expressions(expression, key="options") 1245 return f" OPTION{self.wrap(options)}" if options else "" 1246 1247 def dpipe_sql(self, expression: exp.DPipe) -> str: 1248 return self.sql( 1249 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1250 )
FULL_FORMAT_TIME_MAPPING =
{'weekday': '%A', 'dw': '%A', 'w': '%A', 'month': '%B', 'mm': '%B', 'm': '%B'}
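These "full name" parts are only applied when parsing DATENAME (via _build_formatted_time with full_format_mapping=True). A hedged illustration; the column name is hypothetical and the printed value reflects current behaviour, which may shift across versions.

import sqlglot

node = sqlglot.parse_one("DATENAME(mm, order_date)", read="tsql")
# DATENAME(mm, ...) is parsed into exp.TimeToStr with the full month-name format.
print(node.args["format"].name)  # e.g. '%B'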
DATE_DELTA_INTERVAL =
{'year': 'year', 'yyyy': 'year', 'yy': 'year', 'quarter': 'quarter', 'qq': 'quarter', 'q': 'quarter', 'month': 'month', 'mm': 'month', 'm': 'month', 'week': 'week', 'ww': 'week', 'wk': 'week', 'day': 'day', 'dd': 'day', 'd': 'day'}
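This mapping canonicalizes the many T-SQL datepart abbreviations when DATEADD/DATEDIFF are parsed. A small sketch, assuming a hypothetical column name:

import sqlglot

node = sqlglot.parse_one("DATEADD(yy, 1, order_date)", read="tsql")
# 'yy' is normalized to the canonical unit via DATE_DELTA_INTERVAL.
print(type(node).__name__, node.args["unit"].name)  # e.g. DateAdd year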
DATE_FMT_RE =
re.compile('([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})')
TRANSPILE_SAFE_NUMBER_FMT =
{'C', 'N'}
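Together with DATE_FMT_RE above, this set is how _build_format decides whether a T-SQL FORMAT() call is numeric or date formatting. A hedged sketch with made-up identifiers:

import sqlglot

number = sqlglot.parse_one("FORMAT(amount, 'C')", read="tsql")
date_fmt = sqlglot.parse_one("FORMAT(order_date, 'yyyy-MM-dd')", read="tsql")
# 'C' (currency) maps to exp.NumberToStr, date-like patterns map to exp.TimeToStr.
print(type(number).__name__, type(date_fmt).__name__)  # e.g. NumberToStr TimeToStr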
DEFAULT_START_DATE =
datetime.date(1900, 1, 1)
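_build_date_delta uses this anchor to rewrite integer "dates" (T-SQL treats an integer as days from 1900-01-01) into date strings while parsing DATEDIFF. A hedged sketch:

import sqlglot

node = sqlglot.parse_one("DATEDIFF(day, 10, '2020-01-01')", read="tsql")
# The integer 10 is replaced with DEFAULT_START_DATE + 10 days.
print(node.expression.this.name)  # e.g. 1900-01-11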
BIT_TYPES =
{<class 'sqlglot.expressions.NEQ'>, <class 'sqlglot.expressions.Is'>, <class 'sqlglot.expressions.EQ'>, <class 'sqlglot.expressions.Select'>, <class 'sqlglot.expressions.In'>, <class 'sqlglot.expressions.Alias'>}
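T-SQL has no boolean literals, so the generator renders TRUE/FALSE as BIT values (1/0) in the contexts listed here and as comparisons elsewhere (see Generator.boolean_sql). A hedged sketch; exact output may vary slightly by version:

import sqlglot

# Inside an equality the boolean becomes a BIT literal, e.g. x = 1 ...
print(sqlglot.transpile("SELECT x = TRUE FROM t", read="duckdb", write="tsql")[0])
# ... while a bare boolean predicate is rewritten as a comparison, e.g. WHERE (1 = 1).
print(sqlglot.transpile("SELECT * FROM t WHERE TRUE", read="duckdb", write="tsql")[0])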
OPTIONS: Dict[str, Sequence[Union[Sequence[str], str]]] =
{'DISABLE_OPTIMIZED_PLAN_FORCING': (), 'FAST': (), 'IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX': (), 'LABEL': (), 'MAXDOP': (), 'MAXRECURSION': (), 'MAX_GRANT_PERCENT': (), 'MIN_GRANT_PERCENT': (), 'NO_PERFORMANCE_SPOOL': (), 'QUERYTRACEON': (), 'RECOMPILE': (), 'CONCAT': ('UNION',), 'DISABLE': ('EXTERNALPUSHDOWN', 'SCALEOUTEXECUTION'), 'EXPAND': ('VIEWS',), 'FORCE': ('EXTERNALPUSHDOWN', 'ORDER', 'SCALEOUTEXECUTION'), 'HASH': ('GROUP', 'JOIN', 'UNION'), 'KEEP': ('PLAN',), 'KEEPFIXED': ('PLAN',), 'LOOP': ('JOIN',), 'MERGE': ('JOIN', 'UNION'), 'OPTIMIZE': (('FOR', 'UNKNOWN'),), 'ORDER': ('GROUP',), 'PARAMETERIZATION': ('FORCED', 'SIMPLE'), 'ROBUST': ('PLAN',), 'USE': ('PLAN',)}
OPTIONS_THAT_REQUIRE_EQUAL =
('MAX_GRANT_PERCENT', 'MIN_GRANT_PERCENT', 'LABEL')
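Query hints after OPTION are parsed into exp.QueryOption nodes against the OPTIONS table above, and only the names listed here are regenerated with an '=' sign. A hedged round-trip sketch:

import sqlglot

sql = "SELECT * FROM t OPTION (MAXRECURSION 100, MAX_GRANT_PERCENT = 10)"
# Round-tripping through the T-SQL dialect should keep the OPTION clause intact,
# with '=' preserved only for the options that require it.
print(sqlglot.transpile(sql, read="tsql", write="tsql")[0])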
DATEPART_ONLY_FORMATS =
{'WK', 'HOUR', 'QUARTER', 'DW'}
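These single dateparts have no clean strftime equivalent, so _format_sql emits them back as DATEPART rather than FORMAT when generating T-SQL. A hedged round-trip sketch with a made-up column:

import sqlglot

# DATEPART(QUARTER, ...) parses into exp.TimeToStr, and because 'QUARTER' is in
# DATEPART_ONLY_FORMATS it is regenerated as DATEPART instead of FORMAT.
print(sqlglot.transpile("SELECT DATEPART(QUARTER, order_date) FROM t", read="tsql", write="tsql")[0])
# e.g. SELECT DATEPART(QUARTER, CAST(order_date AS DATETIME2)) FROM t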
def qualify_derived_table_outputs(expression: sqlglot.expressions.Expression) -> sqlglot.expressions.Expression:
Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
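This transform is wired into the Generator's CTE and Subquery handling, so its effect is visible through the public API. A hedged sketch; the generated alias names may differ between sqlglot versions.

import sqlglot

# Unnamed projections inside a derived table receive explicit aliases when
# the query is regenerated as T-SQL.
print(sqlglot.transpile("SELECT * FROM (SELECT 1, 2) AS t", read="tsql", write="tsql")[0])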
class TSQL(sqlglot.dialects.dialect.Dialect):
delimiter and literal values" 1001 ) 1002 return "" 1003 1004 return self.func( 1005 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1006 ) 1007 1008 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1009 nano = expression.args.get("nano") 1010 if nano is not None: 1011 nano.pop() 1012 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1013 1014 if expression.args.get("fractions") is None: 1015 expression.set("fractions", exp.Literal.number(0)) 1016 if expression.args.get("precision") is None: 1017 expression.set("precision", exp.Literal.number(0)) 1018 1019 return rename_func("TIMEFROMPARTS")(self, expression) 1020 1021 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1022 zone = expression.args.get("zone") 1023 if zone is not None: 1024 zone.pop() 1025 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1026 1027 nano = expression.args.get("nano") 1028 if nano is not None: 1029 nano.pop() 1030 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1031 1032 if expression.args.get("milli") is None: 1033 expression.set("milli", exp.Literal.number(0)) 1034 1035 return rename_func("DATETIMEFROMPARTS")(self, expression) 1036 1037 def setitem_sql(self, expression: exp.SetItem) -> str: 1038 this = expression.this 1039 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1040 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1041 return f"{self.sql(this.left)} {self.sql(this.right)}" 1042 1043 return super().setitem_sql(expression) 1044 1045 def boolean_sql(self, expression: exp.Boolean) -> str: 1046 if type(expression.parent) in BIT_TYPES or isinstance( 1047 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1048 ): 1049 return "1" if expression.this else "0" 1050 1051 return "(1 = 1)" if expression.this else "(1 = 0)" 1052 1053 def is_sql(self, expression: exp.Is) -> str: 1054 if isinstance(expression.expression, exp.Boolean): 1055 return self.binary(expression, "=") 1056 return self.binary(expression, "IS") 1057 1058 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1059 sql = self.sql(expression, "this") 1060 properties = expression.args.get("properties") 1061 1062 if sql[:1] != "#" and any( 1063 isinstance(prop, exp.TemporaryProperty) 1064 for prop in (properties.expressions if properties else []) 1065 ): 1066 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1067 1068 return sql 1069 1070 def create_sql(self, expression: exp.Create) -> str: 1071 kind = expression.kind 1072 exists = expression.args.pop("exists", None) 1073 1074 like_property = expression.find(exp.LikeProperty) 1075 if like_property: 1076 ctas_expression = like_property.this 1077 else: 1078 ctas_expression = expression.expression 1079 1080 if kind == "VIEW": 1081 expression.this.set("catalog", None) 1082 with_ = expression.args.get("with") 1083 if ctas_expression and with_: 1084 # We've already preprocessed the Create expression to bubble up any nested CTEs, 1085 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1086 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1087 ctas_expression.set("with", with_.pop()) 1088 1089 sql = super().create_sql(expression) 1090 1091 table = expression.find(exp.Table) 1092 1093 # Convert CTAS statement to SELECT .. INTO .. 
1094 if kind == "TABLE" and ctas_expression: 1095 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1096 ctas_expression = ctas_expression.subquery() 1097 1098 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1099 select_into.set("into", exp.Into(this=table)) 1100 1101 if like_property: 1102 select_into.limit(0, copy=False) 1103 1104 sql = self.sql(select_into) 1105 1106 if exists: 1107 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1108 sql_with_ctes = self.prepend_ctes(expression, sql) 1109 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1110 if kind == "SCHEMA": 1111 return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})""" 1112 elif kind == "TABLE": 1113 assert table 1114 where = exp.and_( 1115 exp.column("table_name").eq(table.name), 1116 exp.column("table_schema").eq(table.db) if table.db else None, 1117 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 1118 ) 1119 return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})""" 1120 elif kind == "INDEX": 1121 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1122 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})""" 1123 elif expression.args.get("replace"): 1124 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1125 1126 return self.prepend_ctes(expression, sql) 1127 1128 def count_sql(self, expression: exp.Count) -> str: 1129 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1130 return rename_func(func_name)(self, expression) 1131 1132 def offset_sql(self, expression: exp.Offset) -> str: 1133 return f"{super().offset_sql(expression)} ROWS" 1134 1135 def version_sql(self, expression: exp.Version) -> str: 1136 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1137 this = f"FOR {name}" 1138 expr = expression.expression 1139 kind = expression.text("kind") 1140 if kind in ("FROM", "BETWEEN"): 1141 args = expr.expressions 1142 sep = "TO" if kind == "FROM" else "AND" 1143 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1144 else: 1145 expr_sql = self.sql(expr) 1146 1147 expr_sql = f" {expr_sql}" if expr_sql else "" 1148 return f"{this} {kind}{expr_sql}" 1149 1150 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1151 table = expression.args.get("table") 1152 table = f"{table} " if table else "" 1153 return f"RETURNS {table}{self.sql(expression, 'this')}" 1154 1155 def returning_sql(self, expression: exp.Returning) -> str: 1156 into = self.sql(expression, "into") 1157 into = self.seg(f"INTO {into}") if into else "" 1158 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1159 1160 def transaction_sql(self, expression: exp.Transaction) -> str: 1161 this = self.sql(expression, "this") 1162 this = f" {this}" if this else "" 1163 mark = self.sql(expression, "mark") 1164 mark = f" WITH MARK {mark}" if mark else "" 1165 return f"BEGIN TRANSACTION{this}{mark}" 1166 1167 def commit_sql(self, expression: exp.Commit) -> str: 1168 this = self.sql(expression, "this") 1169 this = f" {this}" if this else "" 1170 durability = expression.args.get("durability") 1171 durability = ( 1172 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1173 if durability is not None 1174 else "" 1175 ) 1176 return 
f"COMMIT TRANSACTION{this}{durability}" 1177 1178 def rollback_sql(self, expression: exp.Rollback) -> str: 1179 this = self.sql(expression, "this") 1180 this = f" {this}" if this else "" 1181 return f"ROLLBACK TRANSACTION{this}" 1182 1183 def identifier_sql(self, expression: exp.Identifier) -> str: 1184 identifier = super().identifier_sql(expression) 1185 1186 if expression.args.get("global"): 1187 identifier = f"##{identifier}" 1188 elif expression.args.get("temporary"): 1189 identifier = f"#{identifier}" 1190 1191 return identifier 1192 1193 def constraint_sql(self, expression: exp.Constraint) -> str: 1194 this = self.sql(expression, "this") 1195 expressions = self.expressions(expression, flat=True, sep=" ") 1196 return f"CONSTRAINT {this} {expressions}" 1197 1198 def length_sql(self, expression: exp.Length) -> str: 1199 return self._uncast_text(expression, "LEN") 1200 1201 def right_sql(self, expression: exp.Right) -> str: 1202 return self._uncast_text(expression, "RIGHT") 1203 1204 def left_sql(self, expression: exp.Left) -> str: 1205 return self._uncast_text(expression, "LEFT") 1206 1207 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1208 this = expression.this 1209 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1210 this_sql = self.sql(this, "this") 1211 else: 1212 this_sql = self.sql(this) 1213 expression_sql = self.sql(expression, "expression") 1214 return self.func(name, this_sql, expression_sql if expression_sql else None) 1215 1216 def partition_sql(self, expression: exp.Partition) -> str: 1217 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1218 1219 def alter_sql(self, expression: exp.Alter) -> str: 1220 action = seq_get(expression.args.get("actions") or [], 0) 1221 if isinstance(action, exp.AlterRename): 1222 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1223 return super().alter_sql(expression) 1224 1225 def drop_sql(self, expression: exp.Drop) -> str: 1226 if expression.args["kind"] == "VIEW": 1227 expression.this.set("catalog", None) 1228 return super().drop_sql(expression) 1229 1230 def declare_sql(self, expression: exp.Declare) -> str: 1231 return f"DECLARE {self.expressions(expression, flat=True)}" 1232 1233 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1234 variable = self.sql(expression, "this") 1235 default = self.sql(expression, "default") 1236 default = f" = {default}" if default else "" 1237 1238 kind = self.sql(expression, "kind") 1239 if isinstance(expression.args.get("kind"), exp.Schema): 1240 kind = f"TABLE {kind}" 1241 1242 return f"{variable} AS {kind}{default}" 1243 1244 def options_modifier(self, expression: exp.Expression) -> str: 1245 options = self.expressions(expression, key="options") 1246 return f" OPTION{self.wrap(options)}" if options else "" 1247 1248 def dpipe_sql(self, expression: exp.DPipe) -> str: 1249 return self.sql( 1250 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1251 )
LOG_BASE_FIRST: Optional[bool] =
False
Whether the base comes first in the LOG function. Possible values: True, False, None (two arguments are not supported by LOG).
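Since the base is the second argument to T-SQL's LOG (LOG_BASE_FIRST = False), a two-argument call parses into an exp.Log node with its arguments ordered accordingly. A minimal sketch:

import sqlglot
from sqlglot import exp

# LOG(8, 2) in T-SQL computes log base 2 of 8; the base is the second argument.
node = sqlglot.parse_one("SELECT LOG(8, 2)", read="tsql").find(exp.Log)
print(node.sql("tsql"))  # should round-trip to LOG(8, 2)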
TYPED_DIVISION =
True
Whether the behavior of a / b depends on the types of a and b.
False means a / b is always float division.
True means a / b is integer division if both a and b are integers.
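In T-SQL, 1 / 2 evaluates to 0 when both operands are integers, which is what TYPED_DIVISION = True records. A hedged sketch; the exact rendering depends on the target dialect, the sqlglot version, and whether types have been annotated:

import sqlglot

# Integer-typed division truncates in T-SQL; a target where / is always float
# division may need an explicit cast or floor to preserve those semantics.
print(sqlglot.transpile("SELECT 1 / 2", read="tsql", write="duckdb")[0])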
CONCAT_COALESCE =
True
A NULL arg in CONCAT yields NULL by default, but in some dialects it yields an empty string.
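T-SQL sets this flag to True because its CONCAT treats NULL arguments as empty strings. A hedged sketch; the exact output depends on the target dialect's own NULL handling and the sqlglot version:

import sqlglot

# T-SQL's CONCAT coalesces NULL arguments to '', so transpiling it into a dialect
# with strict NULL propagation may wrap the arguments to preserve that behavior.
print(sqlglot.transpile("SELECT CONCAT(a, b) FROM t", read="tsql", write="trino")[0])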
NORMALIZATION_STRATEGY =
<NormalizationStrategy.CASE_INSENSITIVE: 'CASE_INSENSITIVE'>
Specifies the strategy according to which identifiers should be normalized.
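A minimal sketch of identifier normalization under the case-insensitive strategy, using sqlglot's normalize_identifiers helper (the behavior for quoted identifiers is dialect-specific and not shown here):

import sqlglot
from sqlglot.optimizer.normalize_identifiers import normalize_identifiers

# Unquoted identifiers are normalized to a canonical case so the optimizer can
# match column and table references reliably.
expr = sqlglot.parse_one("SELECT MyCol FROM MyTable", read="tsql")
print(normalize_identifiers(expr, dialect="tsql").sql("tsql"))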
TIME_MAPPING: Dict[str, str] =
{'year': '%Y', 'dayofyear': '%j', 'day': '%d', 'dy': '%d', 'y': '%Y', 'week': '%W', 'ww': '%W', 'wk': '%W', 'hour': '%h', 'hh': '%I', 'minute': '%M', 'mi': '%M', 'n': '%M', 'second': '%S', 'ss': '%S', 's': '%-S', 'millisecond': '%f', 'ms': '%f', 'weekday': '%w', 'dw': '%w', 'month': '%m', 'mm': '%M', 'm': '%-M', 'Y': '%Y', 'YYYY': '%Y', 'YY': '%y', 'MMMM': '%B', 'MMM': '%b', 'MM': '%m', 'M': '%-m', 'dddd': '%A', 'dd': '%d', 'd': '%-d', 'HH': '%H', 'H': '%-H', 'h': '%-I', 'ffffff': '%f', 'yyyy': '%Y', 'yy': '%y'}
Associates this dialect's time formats with their equivalent Python strftime formats.
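For example, a T-SQL style date format can be translated into its strftime equivalent with sqlglot.time.format_time and this mapping:

from sqlglot.time import format_time
from sqlglot.dialects.tsql import TSQL

# 'yyyy' -> '%Y', 'MM' -> '%m', 'dd' -> '%d'; literal separators pass through.
print(format_time("yyyy-MM-dd", TSQL.TIME_MAPPING))  # '%Y-%m-%d'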
CONVERT_FORMAT_MAPPING =
{'0': '%b %d %Y %-I:%M%p', '1': '%m/%d/%y', '2': '%y.%m.%d', '3': '%d/%m/%y', '4': '%d.%m.%y', '5': '%d-%m-%y', '6': '%d %b %y', '7': '%b %d, %y', '8': '%H:%M:%S', '9': '%b %d %Y %-I:%M:%S:%f%p', '10': 'mm-dd-yy', '11': 'yy/mm/dd', '12': 'yymmdd', '13': '%d %b %Y %H:%M:ss:%f', '14': '%H:%M:%S:%f', '20': '%Y-%m-%d %H:%M:%S', '21': '%Y-%m-%d %H:%M:%S.%f', '22': '%m/%d/%y %-I:%M:%S %p', '23': '%Y-%m-%d', '24': '%H:%M:%S', '25': '%Y-%m-%d %H:%M:%S.%f', '100': '%b %d %Y %-I:%M%p', '101': '%m/%d/%Y', '102': '%Y.%m.%d', '103': '%d/%m/%Y', '104': '%d.%m.%Y', '105': '%d-%m-%Y', '106': '%d %b %Y', '107': '%b %d, %Y', '108': '%H:%M:%S', '109': '%b %d %Y %-I:%M:%S:%f%p', '110': '%m-%d-%Y', '111': '%Y/%m/%d', '112': '%Y%m%d', '113': '%d %b %Y %H:%M:%S:%f', '114': '%H:%M:%S:%f', '120': '%Y-%m-%d %H:%M:%S', '121': '%Y-%m-%d %H:%M:%S.%f'}
FORMAT_TIME_MAPPING =
{'y': '%B %Y', 'd': '%m/%d/%Y', 'H': '%-H', 'h': '%-I', 's': '%Y-%m-%d %H:%M:%S', 'D': '%A,%B,%Y', 'f': '%A,%B,%Y %-I:%M %p', 'F': '%A,%B,%Y %-I:%M:%S %p', 'g': '%m/%d/%Y %-I:%M %p', 'G': '%m/%d/%Y %-I:%M:%S %p', 'M': '%B %-d', 'm': '%B %-d', 'O': '%Y-%m-%dT%H:%M:%S', 'u': '%Y-%M-%D %H:%M:%S%z', 'U': '%A, %B %D, %Y %H:%M:%S%z', 'T': '%-I:%M:%S %p', 't': '%-I:%M', 'Y': '%a %Y'}
tokenizer_class =
<class 'TSQL.Tokenizer'>
parser_class =
<class 'TSQL.Parser'>
generator_class =
<class 'TSQL.Generator'>
TIME_TRIE: Dict =
{'y': {'e': {'a': {'r': {0: True}}}, 0: True, 'y': {'y': {'y': {0: True}}, 0: True}}, 'd': {'a': {'y': {'o': {'f': {'y': {'e': {'a': {'r': {0: True}}}}}}, 0: True}}, 'y': {0: True}, 'w': {0: True}, 'd': {'d': {'d': {0: True}}, 0: True}, 0: True}, 'w': {'e': {'e': {'k': {0: True, 'd': {'a': {'y': {0: True}}}}}}, 'w': {0: True}, 'k': {0: True}}, 'h': {'o': {'u': {'r': {0: True}}}, 'h': {0: True}, 0: True}, 'm': {'i': {'n': {'u': {'t': {'e': {0: True}}}}, 0: True, 'l': {'l': {'i': {'s': {'e': {'c': {'o': {'n': {'d': {0: True}}}}}}}}}}, 's': {0: True}, 'o': {'n': {'t': {'h': {0: True}}}}, 'm': {0: True}, 0: True}, 'n': {0: True}, 's': {'e': {'c': {'o': {'n': {'d': {0: True}}}}}, 's': {0: True}, 0: True}, 'Y': {0: True, 'Y': {'Y': {'Y': {0: True}}, 0: True}}, 'M': {'M': {'M': {'M': {0: True}, 0: True}, 0: True}, 0: True}, 'H': {'H': {0: True}, 0: True}, 'f': {'f': {'f': {'f': {'f': {'f': {0: True}}}}}}}
FORMAT_TRIE: Dict =
{'y': {'e': {'a': {'r': {0: True}}}, 0: True, 'y': {'y': {'y': {0: True}}, 0: True}}, 'd': {'a': {'y': {'o': {'f': {'y': {'e': {'a': {'r': {0: True}}}}}}, 0: True}}, 'y': {0: True}, 'w': {0: True}, 'd': {'d': {'d': {0: True}}, 0: True}, 0: True}, 'w': {'e': {'e': {'k': {0: True, 'd': {'a': {'y': {0: True}}}}}}, 'w': {0: True}, 'k': {0: True}}, 'h': {'o': {'u': {'r': {0: True}}}, 'h': {0: True}, 0: True}, 'm': {'i': {'n': {'u': {'t': {'e': {0: True}}}}, 0: True, 'l': {'l': {'i': {'s': {'e': {'c': {'o': {'n': {'d': {0: True}}}}}}}}}}, 's': {0: True}, 'o': {'n': {'t': {'h': {0: True}}}}, 'm': {0: True}, 0: True}, 'n': {0: True}, 's': {'e': {'c': {'o': {'n': {'d': {0: True}}}}}, 's': {0: True}, 0: True}, 'Y': {0: True, 'Y': {'Y': {'Y': {0: True}}, 0: True}}, 'M': {'M': {'M': {'M': {0: True}, 0: True}, 0: True}, 0: True}, 'H': {'H': {0: True}, 0: True}, 'f': {'f': {'f': {'f': {'f': {'f': {0: True}}}}}}}
INVERSE_TIME_MAPPING: Dict[str, str] =
{'%Y': 'yyyy', '%j': 'dayofyear', '%d': 'dd', '%W': 'wk', '%h': 'hour', '%I': 'hh', '%M': 'mm', '%S': 'ss', '%-S': 's', '%f': 'ffffff', '%w': 'dw', '%m': 'MM', '%-M': 'm', '%y': 'yy', '%B': 'MMMM', '%b': 'MMM', '%-m': 'M', '%A': 'dddd', '%-d': 'd', '%H': 'HH', '%-H': 'H', '%-I': 'h'}
INVERSE_TIME_TRIE: Dict =
{'%': {'Y': {0: True}, 'j': {0: True}, 'd': {0: True}, 'W': {0: True}, 'h': {0: True}, 'I': {0: True}, 'M': {0: True}, 'S': {0: True}, '-': {'S': {0: True}, 'M': {0: True}, 'm': {0: True}, 'd': {0: True}, 'H': {0: True}, 'I': {0: True}}, 'f': {0: True}, 'w': {0: True}, 'm': {0: True}, 'y': {0: True}, 'B': {0: True}, 'b': {0: True}, 'A': {0: True}, 'H': {0: True}}}
Inherited Members
- sqlglot.dialects.dialect.Dialect
- Dialect
- INDEX_OFFSET
- WEEK_OFFSET
- UNNEST_COLUMN_ONLY
- ALIAS_POST_TABLESAMPLE
- TABLESAMPLE_SIZE_IS_PERCENT
- IDENTIFIERS_CAN_START_WITH_DIGIT
- DPIPE_IS_STRING_CONCAT
- STRICT_STRING_CONCAT
- SUPPORTS_USER_DEFINED_TYPES
- COPY_PARAMS_ARE_CSV
- NORMALIZE_FUNCTIONS
- PRESERVE_ORIGINAL_NAMES
- NULL_ORDERING
- SAFE_DIVISION
- HEX_LOWERCASE
- DATE_FORMAT
- DATEINT_FORMAT
- FORMAT_MAPPING
- UNESCAPED_SEQUENCES
- PSEUDOCOLUMNS
- PREFER_CTE_ALIAS_COLUMN
- FORCE_EARLY_ALIAS_REF_EXPANSION
- EXPAND_ALIAS_REFS_EARLY_ONLY_IN_GROUP_BY
- SUPPORTS_ORDER_BY_ALL
- HAS_DISTINCT_ARRAY_CONSTRUCTORS
- SUPPORTS_FIXED_SIZE_ARRAYS
- STRICT_JSON_PATH_SYNTAX
- ON_CONDITION_EMPTY_BEFORE_ERROR
- ARRAY_AGG_INCLUDES_NULLS
- PROMOTE_TO_INFERRED_DATETIME_TYPE
- SUPPORTS_VALUES_DEFAULT
- REGEXP_EXTRACT_DEFAULT_GROUP
- SET_OP_DISTINCT_BY_DEFAULT
- CREATABLE_KIND_MAPPING
- DATE_PART_MAPPING
- TYPE_TO_EXPRESSIONS
- ANNOTATORS
- get_or_raise
- format_time
- settings
- normalize_identifier
- case_sensitive
- can_identify
- quote_identifier
- to_json_path
- parse
- parse_into
- generate
- transpile
- tokenize
- tokenizer
- jsonpath_tokenizer
- parser
- generator
487 class Tokenizer(tokens.Tokenizer): 488 IDENTIFIERS = [("[", "]"), '"'] 489 QUOTES = ["'", '"'] 490 HEX_STRINGS = [("0x", ""), ("0X", "")] 491 VAR_SINGLE_TOKENS = {"@", "$", "#"} 492 493 KEYWORDS = { 494 **tokens.Tokenizer.KEYWORDS, 495 "CLUSTERED INDEX": TokenType.INDEX, 496 "DATETIME2": TokenType.DATETIME, 497 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 498 "DECLARE": TokenType.DECLARE, 499 "EXEC": TokenType.COMMAND, 500 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 501 "IMAGE": TokenType.IMAGE, 502 "MONEY": TokenType.MONEY, 503 "NONCLUSTERED INDEX": TokenType.INDEX, 504 "NTEXT": TokenType.TEXT, 505 "OPTION": TokenType.OPTION, 506 "OUTPUT": TokenType.RETURNING, 507 "PRINT": TokenType.COMMAND, 508 "PROC": TokenType.PROCEDURE, 509 "REAL": TokenType.FLOAT, 510 "ROWVERSION": TokenType.ROWVERSION, 511 "SMALLDATETIME": TokenType.DATETIME, 512 "SMALLMONEY": TokenType.SMALLMONEY, 513 "SQL_VARIANT": TokenType.VARIANT, 514 "SYSTEM_USER": TokenType.CURRENT_USER, 515 "TOP": TokenType.TOP, 516 "TIMESTAMP": TokenType.ROWVERSION, 517 "TINYINT": TokenType.UTINYINT, 518 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 519 "UPDATE STATISTICS": TokenType.COMMAND, 520 "XML": TokenType.XML, 521 } 522 KEYWORDS.pop("/*+") 523 524 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
KEYWORDS =
{'{%': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%-': <TokenType.BLOCK_START: 'BLOCK_START'>, '%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '+%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '{{+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{{-': <TokenType.BLOCK_START: 'BLOCK_START'>, '+}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '==': <TokenType.EQ: 'EQ'>, '::': <TokenType.DCOLON: 'DCOLON'>, '||': <TokenType.DPIPE: 'DPIPE'>, '>=': <TokenType.GTE: 'GTE'>, '<=': <TokenType.LTE: 'LTE'>, '<>': <TokenType.NEQ: 'NEQ'>, '!=': <TokenType.NEQ: 'NEQ'>, ':=': <TokenType.COLON_EQ: 'COLON_EQ'>, '<=>': <TokenType.NULLSAFE_EQ: 'NULLSAFE_EQ'>, '->': <TokenType.ARROW: 'ARROW'>, '->>': <TokenType.DARROW: 'DARROW'>, '=>': <TokenType.FARROW: 'FARROW'>, '#>': <TokenType.HASH_ARROW: 'HASH_ARROW'>, '#>>': <TokenType.DHASH_ARROW: 'DHASH_ARROW'>, '<->': <TokenType.LR_ARROW: 'LR_ARROW'>, '&&': <TokenType.DAMP: 'DAMP'>, '??': <TokenType.DQMARK: 'DQMARK'>, '~~~': <TokenType.GLOB: 'GLOB'>, '~~': <TokenType.LIKE: 'LIKE'>, '~~*': <TokenType.ILIKE: 'ILIKE'>, '~*': <TokenType.IRLIKE: 'IRLIKE'>, 'ALL': <TokenType.ALL: 'ALL'>, 'ALWAYS': <TokenType.ALWAYS: 'ALWAYS'>, 'AND': <TokenType.AND: 'AND'>, 'ANTI': <TokenType.ANTI: 'ANTI'>, 'ANY': <TokenType.ANY: 'ANY'>, 'ASC': <TokenType.ASC: 'ASC'>, 'AS': <TokenType.ALIAS: 'ALIAS'>, 'ASOF': <TokenType.ASOF: 'ASOF'>, 'AUTOINCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'AUTO_INCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'BEGIN': <TokenType.BEGIN: 'BEGIN'>, 'BETWEEN': <TokenType.BETWEEN: 'BETWEEN'>, 'CACHE': <TokenType.CACHE: 'CACHE'>, 'UNCACHE': <TokenType.UNCACHE: 'UNCACHE'>, 'CASE': <TokenType.CASE: 'CASE'>, 'CHARACTER SET': <TokenType.CHARACTER_SET: 'CHARACTER_SET'>, 'CLUSTER BY': <TokenType.CLUSTER_BY: 'CLUSTER_BY'>, 'COLLATE': <TokenType.COLLATE: 'COLLATE'>, 'COLUMN': <TokenType.COLUMN: 'COLUMN'>, 'COMMIT': <TokenType.COMMIT: 'COMMIT'>, 'CONNECT BY': <TokenType.CONNECT_BY: 'CONNECT_BY'>, 'CONSTRAINT': <TokenType.CONSTRAINT: 'CONSTRAINT'>, 'COPY': <TokenType.COPY: 'COPY'>, 'CREATE': <TokenType.CREATE: 'CREATE'>, 'CROSS': <TokenType.CROSS: 'CROSS'>, 'CUBE': <TokenType.CUBE: 'CUBE'>, 'CURRENT_DATE': <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, 'CURRENT_TIME': <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, 'CURRENT_TIMESTAMP': <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, 'CURRENT_USER': <TokenType.CURRENT_USER: 'CURRENT_USER'>, 'DATABASE': <TokenType.DATABASE: 'DATABASE'>, 'DEFAULT': <TokenType.DEFAULT: 'DEFAULT'>, 'DELETE': <TokenType.DELETE: 'DELETE'>, 'DESC': <TokenType.DESC: 'DESC'>, 'DESCRIBE': <TokenType.DESCRIBE: 'DESCRIBE'>, 'DISTINCT': <TokenType.DISTINCT: 'DISTINCT'>, 'DISTRIBUTE BY': <TokenType.DISTRIBUTE_BY: 'DISTRIBUTE_BY'>, 'DIV': <TokenType.DIV: 'DIV'>, 'DROP': <TokenType.DROP: 'DROP'>, 'ELSE': <TokenType.ELSE: 'ELSE'>, 'END': <TokenType.END: 'END'>, 'ENUM': <TokenType.ENUM: 'ENUM'>, 'ESCAPE': <TokenType.ESCAPE: 'ESCAPE'>, 'EXCEPT': <TokenType.EXCEPT: 'EXCEPT'>, 'EXECUTE': <TokenType.EXECUTE: 'EXECUTE'>, 'EXISTS': <TokenType.EXISTS: 'EXISTS'>, 'FALSE': <TokenType.FALSE: 'FALSE'>, 'FETCH': <TokenType.FETCH: 'FETCH'>, 'FILTER': <TokenType.FILTER: 'FILTER'>, 'FIRST': <TokenType.FIRST: 'FIRST'>, 'FULL': <TokenType.FULL: 'FULL'>, 'FUNCTION': <TokenType.FUNCTION: 'FUNCTION'>, 'FOR': <TokenType.FOR: 'FOR'>, 'FOREIGN KEY': <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, 'FORMAT': <TokenType.FORMAT: 'FORMAT'>, 'FROM': <TokenType.FROM: 'FROM'>, 
'GEOGRAPHY': <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, 'GEOMETRY': <TokenType.GEOMETRY: 'GEOMETRY'>, 'GLOB': <TokenType.GLOB: 'GLOB'>, 'GROUP BY': <TokenType.GROUP_BY: 'GROUP_BY'>, 'GROUPING SETS': <TokenType.GROUPING_SETS: 'GROUPING_SETS'>, 'HAVING': <TokenType.HAVING: 'HAVING'>, 'ILIKE': <TokenType.ILIKE: 'ILIKE'>, 'IN': <TokenType.IN: 'IN'>, 'INDEX': <TokenType.INDEX: 'INDEX'>, 'INET': <TokenType.INET: 'INET'>, 'INNER': <TokenType.INNER: 'INNER'>, 'INSERT': <TokenType.INSERT: 'INSERT'>, 'INTERVAL': <TokenType.INTERVAL: 'INTERVAL'>, 'INTERSECT': <TokenType.INTERSECT: 'INTERSECT'>, 'INTO': <TokenType.INTO: 'INTO'>, 'IS': <TokenType.IS: 'IS'>, 'ISNULL': <TokenType.ISNULL: 'ISNULL'>, 'JOIN': <TokenType.JOIN: 'JOIN'>, 'KEEP': <TokenType.KEEP: 'KEEP'>, 'KILL': <TokenType.KILL: 'KILL'>, 'LATERAL': <TokenType.LATERAL: 'LATERAL'>, 'LEFT': <TokenType.LEFT: 'LEFT'>, 'LIKE': <TokenType.LIKE: 'LIKE'>, 'LIMIT': <TokenType.LIMIT: 'LIMIT'>, 'LOAD': <TokenType.LOAD: 'LOAD'>, 'LOCK': <TokenType.LOCK: 'LOCK'>, 'MERGE': <TokenType.MERGE: 'MERGE'>, 'NATURAL': <TokenType.NATURAL: 'NATURAL'>, 'NEXT': <TokenType.NEXT: 'NEXT'>, 'NOT': <TokenType.NOT: 'NOT'>, 'NOTNULL': <TokenType.NOTNULL: 'NOTNULL'>, 'NULL': <TokenType.NULL: 'NULL'>, 'OBJECT': <TokenType.OBJECT: 'OBJECT'>, 'OFFSET': <TokenType.OFFSET: 'OFFSET'>, 'ON': <TokenType.ON: 'ON'>, 'OR': <TokenType.OR: 'OR'>, 'XOR': <TokenType.XOR: 'XOR'>, 'ORDER BY': <TokenType.ORDER_BY: 'ORDER_BY'>, 'ORDINALITY': <TokenType.ORDINALITY: 'ORDINALITY'>, 'OUTER': <TokenType.OUTER: 'OUTER'>, 'OVER': <TokenType.OVER: 'OVER'>, 'OVERLAPS': <TokenType.OVERLAPS: 'OVERLAPS'>, 'OVERWRITE': <TokenType.OVERWRITE: 'OVERWRITE'>, 'PARTITION': <TokenType.PARTITION: 'PARTITION'>, 'PARTITION BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED_BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PERCENT': <TokenType.PERCENT: 'PERCENT'>, 'PIVOT': <TokenType.PIVOT: 'PIVOT'>, 'PRAGMA': <TokenType.PRAGMA: 'PRAGMA'>, 'PRIMARY KEY': <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, 'PROCEDURE': <TokenType.PROCEDURE: 'PROCEDURE'>, 'QUALIFY': <TokenType.QUALIFY: 'QUALIFY'>, 'RANGE': <TokenType.RANGE: 'RANGE'>, 'RECURSIVE': <TokenType.RECURSIVE: 'RECURSIVE'>, 'REGEXP': <TokenType.RLIKE: 'RLIKE'>, 'RENAME': <TokenType.RENAME: 'RENAME'>, 'REPLACE': <TokenType.REPLACE: 'REPLACE'>, 'RETURNING': <TokenType.RETURNING: 'RETURNING'>, 'REFERENCES': <TokenType.REFERENCES: 'REFERENCES'>, 'RIGHT': <TokenType.RIGHT: 'RIGHT'>, 'RLIKE': <TokenType.RLIKE: 'RLIKE'>, 'ROLLBACK': <TokenType.ROLLBACK: 'ROLLBACK'>, 'ROLLUP': <TokenType.ROLLUP: 'ROLLUP'>, 'ROW': <TokenType.ROW: 'ROW'>, 'ROWS': <TokenType.ROWS: 'ROWS'>, 'SCHEMA': <TokenType.SCHEMA: 'SCHEMA'>, 'SELECT': <TokenType.SELECT: 'SELECT'>, 'SEMI': <TokenType.SEMI: 'SEMI'>, 'SET': <TokenType.SET: 'SET'>, 'SETTINGS': <TokenType.SETTINGS: 'SETTINGS'>, 'SHOW': <TokenType.SHOW: 'SHOW'>, 'SIMILAR TO': <TokenType.SIMILAR_TO: 'SIMILAR_TO'>, 'SOME': <TokenType.SOME: 'SOME'>, 'SORT BY': <TokenType.SORT_BY: 'SORT_BY'>, 'START WITH': <TokenType.START_WITH: 'START_WITH'>, 'STRAIGHT_JOIN': <TokenType.STRAIGHT_JOIN: 'STRAIGHT_JOIN'>, 'TABLE': <TokenType.TABLE: 'TABLE'>, 'TABLESAMPLE': <TokenType.TABLE_SAMPLE: 'TABLE_SAMPLE'>, 'TEMP': <TokenType.TEMPORARY: 'TEMPORARY'>, 'TEMPORARY': <TokenType.TEMPORARY: 'TEMPORARY'>, 'THEN': <TokenType.THEN: 'THEN'>, 'TRUE': <TokenType.TRUE: 'TRUE'>, 'TRUNCATE': <TokenType.TRUNCATE: 'TRUNCATE'>, 'UNION': <TokenType.UNION: 'UNION'>, 'UNKNOWN': <TokenType.UNKNOWN: 'UNKNOWN'>, 'UNNEST': 
<TokenType.UNNEST: 'UNNEST'>, 'UNPIVOT': <TokenType.UNPIVOT: 'UNPIVOT'>, 'UPDATE': <TokenType.UPDATE: 'UPDATE'>, 'USE': <TokenType.USE: 'USE'>, 'USING': <TokenType.USING: 'USING'>, 'UUID': <TokenType.UUID: 'UUID'>, 'VALUES': <TokenType.VALUES: 'VALUES'>, 'VIEW': <TokenType.VIEW: 'VIEW'>, 'VOLATILE': <TokenType.VOLATILE: 'VOLATILE'>, 'WHEN': <TokenType.WHEN: 'WHEN'>, 'WHERE': <TokenType.WHERE: 'WHERE'>, 'WINDOW': <TokenType.WINDOW: 'WINDOW'>, 'WITH': <TokenType.WITH: 'WITH'>, 'APPLY': <TokenType.APPLY: 'APPLY'>, 'ARRAY': <TokenType.ARRAY: 'ARRAY'>, 'BIT': <TokenType.BIT: 'BIT'>, 'BOOL': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BOOLEAN': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BYTE': <TokenType.TINYINT: 'TINYINT'>, 'MEDIUMINT': <TokenType.MEDIUMINT: 'MEDIUMINT'>, 'INT1': <TokenType.TINYINT: 'TINYINT'>, 'TINYINT': <TokenType.UTINYINT: 'UTINYINT'>, 'INT16': <TokenType.SMALLINT: 'SMALLINT'>, 'SHORT': <TokenType.SMALLINT: 'SMALLINT'>, 'SMALLINT': <TokenType.SMALLINT: 'SMALLINT'>, 'INT128': <TokenType.INT128: 'INT128'>, 'HUGEINT': <TokenType.INT128: 'INT128'>, 'UHUGEINT': <TokenType.UINT128: 'UINT128'>, 'INT2': <TokenType.SMALLINT: 'SMALLINT'>, 'INTEGER': <TokenType.INT: 'INT'>, 'INT': <TokenType.INT: 'INT'>, 'INT4': <TokenType.INT: 'INT'>, 'INT32': <TokenType.INT: 'INT'>, 'INT64': <TokenType.BIGINT: 'BIGINT'>, 'LONG': <TokenType.BIGINT: 'BIGINT'>, 'BIGINT': <TokenType.BIGINT: 'BIGINT'>, 'INT8': <TokenType.TINYINT: 'TINYINT'>, 'UINT': <TokenType.UINT: 'UINT'>, 'DEC': <TokenType.DECIMAL: 'DECIMAL'>, 'DECIMAL': <TokenType.DECIMAL: 'DECIMAL'>, 'DECIMAL32': <TokenType.DECIMAL32: 'DECIMAL32'>, 'DECIMAL64': <TokenType.DECIMAL64: 'DECIMAL64'>, 'DECIMAL128': <TokenType.DECIMAL128: 'DECIMAL128'>, 'DECIMAL256': <TokenType.DECIMAL256: 'DECIMAL256'>, 'BIGDECIMAL': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'BIGNUMERIC': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'LIST': <TokenType.LIST: 'LIST'>, 'MAP': <TokenType.MAP: 'MAP'>, 'NULLABLE': <TokenType.NULLABLE: 'NULLABLE'>, 'NUMBER': <TokenType.DECIMAL: 'DECIMAL'>, 'NUMERIC': <TokenType.DECIMAL: 'DECIMAL'>, 'FIXED': <TokenType.DECIMAL: 'DECIMAL'>, 'REAL': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT4': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT8': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE PRECISION': <TokenType.DOUBLE: 'DOUBLE'>, 'JSON': <TokenType.JSON: 'JSON'>, 'JSONB': <TokenType.JSONB: 'JSONB'>, 'CHAR': <TokenType.CHAR: 'CHAR'>, 'CHARACTER': <TokenType.CHAR: 'CHAR'>, 'NCHAR': <TokenType.NCHAR: 'NCHAR'>, 'VARCHAR': <TokenType.VARCHAR: 'VARCHAR'>, 'VARCHAR2': <TokenType.VARCHAR: 'VARCHAR'>, 'NVARCHAR': <TokenType.NVARCHAR: 'NVARCHAR'>, 'NVARCHAR2': <TokenType.NVARCHAR: 'NVARCHAR'>, 'BPCHAR': <TokenType.BPCHAR: 'BPCHAR'>, 'STR': <TokenType.TEXT: 'TEXT'>, 'STRING': <TokenType.TEXT: 'TEXT'>, 'TEXT': <TokenType.TEXT: 'TEXT'>, 'LONGTEXT': <TokenType.LONGTEXT: 'LONGTEXT'>, 'MEDIUMTEXT': <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, 'TINYTEXT': <TokenType.TINYTEXT: 'TINYTEXT'>, 'CLOB': <TokenType.TEXT: 'TEXT'>, 'LONGVARCHAR': <TokenType.TEXT: 'TEXT'>, 'BINARY': <TokenType.BINARY: 'BINARY'>, 'BLOB': <TokenType.VARBINARY: 'VARBINARY'>, 'LONGBLOB': <TokenType.LONGBLOB: 'LONGBLOB'>, 'MEDIUMBLOB': <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, 'TINYBLOB': <TokenType.TINYBLOB: 'TINYBLOB'>, 'BYTEA': <TokenType.VARBINARY: 'VARBINARY'>, 'VARBINARY': <TokenType.VARBINARY: 'VARBINARY'>, 'TIME': <TokenType.TIME: 'TIME'>, 'TIMETZ': <TokenType.TIMETZ: 'TIMETZ'>, 'TIMESTAMP': <TokenType.ROWVERSION: 'ROWVERSION'>, 'TIMESTAMPTZ': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 
'TIMESTAMPLTZ': <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, 'TIMESTAMP_LTZ': <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, 'TIMESTAMPNTZ': <TokenType.TIMESTAMPNTZ: 'TIMESTAMPNTZ'>, 'TIMESTAMP_NTZ': <TokenType.TIMESTAMPNTZ: 'TIMESTAMPNTZ'>, 'DATE': <TokenType.DATE: 'DATE'>, 'DATETIME': <TokenType.DATETIME: 'DATETIME'>, 'INT4RANGE': <TokenType.INT4RANGE: 'INT4RANGE'>, 'INT4MULTIRANGE': <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, 'INT8RANGE': <TokenType.INT8RANGE: 'INT8RANGE'>, 'INT8MULTIRANGE': <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, 'NUMRANGE': <TokenType.NUMRANGE: 'NUMRANGE'>, 'NUMMULTIRANGE': <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, 'TSRANGE': <TokenType.TSRANGE: 'TSRANGE'>, 'TSMULTIRANGE': <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, 'TSTZRANGE': <TokenType.TSTZRANGE: 'TSTZRANGE'>, 'TSTZMULTIRANGE': <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, 'DATERANGE': <TokenType.DATERANGE: 'DATERANGE'>, 'DATEMULTIRANGE': <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, 'UNIQUE': <TokenType.UNIQUE: 'UNIQUE'>, 'VECTOR': <TokenType.VECTOR: 'VECTOR'>, 'STRUCT': <TokenType.STRUCT: 'STRUCT'>, 'SEQUENCE': <TokenType.SEQUENCE: 'SEQUENCE'>, 'VARIANT': <TokenType.VARIANT: 'VARIANT'>, 'ALTER': <TokenType.ALTER: 'ALTER'>, 'ANALYZE': <TokenType.COMMAND: 'COMMAND'>, 'CALL': <TokenType.COMMAND: 'COMMAND'>, 'COMMENT': <TokenType.COMMENT: 'COMMENT'>, 'EXPLAIN': <TokenType.COMMAND: 'COMMAND'>, 'GRANT': <TokenType.GRANT: 'GRANT'>, 'OPTIMIZE': <TokenType.COMMAND: 'COMMAND'>, 'PREPARE': <TokenType.COMMAND: 'COMMAND'>, 'VACUUM': <TokenType.COMMAND: 'COMMAND'>, 'USER-DEFINED': <TokenType.USERDEFINED: 'USERDEFINED'>, 'FOR VERSION': <TokenType.VERSION_SNAPSHOT: 'VERSION_SNAPSHOT'>, 'FOR TIMESTAMP': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, 'CLUSTERED INDEX': <TokenType.INDEX: 'INDEX'>, 'DATETIME2': <TokenType.DATETIME: 'DATETIME'>, 'DATETIMEOFFSET': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'DECLARE': <TokenType.DECLARE: 'DECLARE'>, 'EXEC': <TokenType.COMMAND: 'COMMAND'>, 'FOR SYSTEM_TIME': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, 'IMAGE': <TokenType.IMAGE: 'IMAGE'>, 'MONEY': <TokenType.MONEY: 'MONEY'>, 'NONCLUSTERED INDEX': <TokenType.INDEX: 'INDEX'>, 'NTEXT': <TokenType.TEXT: 'TEXT'>, 'OPTION': <TokenType.OPTION: 'OPTION'>, 'OUTPUT': <TokenType.RETURNING: 'RETURNING'>, 'PRINT': <TokenType.COMMAND: 'COMMAND'>, 'PROC': <TokenType.PROCEDURE: 'PROCEDURE'>, 'ROWVERSION': <TokenType.ROWVERSION: 'ROWVERSION'>, 'SMALLDATETIME': <TokenType.DATETIME: 'DATETIME'>, 'SMALLMONEY': <TokenType.SMALLMONEY: 'SMALLMONEY'>, 'SQL_VARIANT': <TokenType.VARIANT: 'VARIANT'>, 'SYSTEM_USER': <TokenType.CURRENT_USER: 'CURRENT_USER'>, 'TOP': <TokenType.TOP: 'TOP'>, 'UNIQUEIDENTIFIER': <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, 'UPDATE STATISTICS': <TokenType.COMMAND: 'COMMAND'>, 'XML': <TokenType.XML: 'XML'>}
COMMANDS =
{<TokenType.SHOW: 'SHOW'>, <TokenType.FETCH: 'FETCH'>, <TokenType.RENAME: 'RENAME'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.END: 'END'>, <TokenType.COMMAND: 'COMMAND'>}
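A minimal sketch of the Tokenizer in action; TOP and [bracketed] identifiers are recognized per the settings above (the exact token types printed depend on the sqlglot version):

from sqlglot.dialects.tsql import TSQL

# Inspect the token stream produced for a T-SQL statement.
for token in TSQL().tokenize("SELECT TOP 5 [Name] FROM #temp"):
    print(token.token_type, repr(token.text))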
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
526 class Parser(parser.Parser): 527 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 528 LOG_DEFAULTS_TO_LN = True 529 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 530 STRING_ALIASES = True 531 NO_PAREN_IF_COMMANDS = False 532 533 QUERY_MODIFIER_PARSERS = { 534 **parser.Parser.QUERY_MODIFIER_PARSERS, 535 TokenType.OPTION: lambda self: ("options", self._parse_options()), 536 } 537 538 FUNCTIONS = { 539 **parser.Parser.FUNCTIONS, 540 "CHARINDEX": lambda args: exp.StrPosition( 541 this=seq_get(args, 1), 542 substr=seq_get(args, 0), 543 position=seq_get(args, 2), 544 ), 545 "COUNT": lambda args: exp.Count( 546 this=seq_get(args, 0), expressions=args[1:], big_int=False 547 ), 548 "COUNT_BIG": lambda args: exp.Count( 549 this=seq_get(args, 0), expressions=args[1:], big_int=True 550 ), 551 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 552 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 553 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 554 "DATEPART": _build_formatted_time(exp.TimeToStr), 555 "DATETIMEFROMPARTS": _build_datetimefromparts, 556 "EOMONTH": _build_eomonth, 557 "FORMAT": _build_format, 558 "GETDATE": exp.CurrentTimestamp.from_arg_list, 559 "HASHBYTES": _build_hashbytes, 560 "ISNULL": build_coalesce, 561 "JSON_QUERY": _build_json_query, 562 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 563 "LEN": _build_with_arg_as_text(exp.Length), 564 "LEFT": _build_with_arg_as_text(exp.Left), 565 "RIGHT": _build_with_arg_as_text(exp.Right), 566 "PARSENAME": _build_parsename, 567 "REPLICATE": exp.Repeat.from_arg_list, 568 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 569 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 570 "SUSER_NAME": exp.CurrentUser.from_arg_list, 571 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 572 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 573 "TIMEFROMPARTS": _build_timefromparts, 574 } 575 576 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 577 578 PROCEDURE_OPTIONS = dict.fromkeys( 579 ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 580 ) 581 582 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 583 TokenType.TABLE, 584 *parser.Parser.TYPE_TOKENS, 585 } 586 587 STATEMENT_PARSERS = { 588 **parser.Parser.STATEMENT_PARSERS, 589 TokenType.DECLARE: lambda self: self._parse_declare(), 590 } 591 592 RANGE_PARSERS = { 593 **parser.Parser.RANGE_PARSERS, 594 TokenType.DCOLON: lambda self, this: self.expression( 595 exp.ScopeResolution, 596 this=this, 597 expression=self._parse_function() or self._parse_var(any_token=True), 598 ), 599 } 600 601 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 602 COLUMN_OPERATORS = { 603 **parser.Parser.COLUMN_OPERATORS, 604 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 605 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 606 else self.expression(exp.ScopeResolution, this=this, expression=to), 607 } 608 609 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 610 # We want to use _parse_types() if the first token after :: is a known type, 611 # otherwise we could parse something like x::varchar(max) into a function 612 if self._match_set(self.TYPE_TOKENS, advance=False): 613 return self._parse_types() 614 615 return self._parse_function() or self._parse_types() 616 617 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 
618 if not self._match(TokenType.OPTION): 619 return None 620 621 def _parse_option() -> t.Optional[exp.Expression]: 622 option = self._parse_var_from_options(OPTIONS) 623 if not option: 624 return None 625 626 self._match(TokenType.EQ) 627 return self.expression( 628 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 629 ) 630 631 return self._parse_wrapped_csv(_parse_option) 632 633 def _parse_projections(self) -> t.List[exp.Expression]: 634 """ 635 T-SQL supports the syntax alias = expression in the SELECT's projection list, 636 so we transform all parsed Selects to convert their EQ projections into Aliases. 637 638 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 639 """ 640 return [ 641 ( 642 exp.alias_(projection.expression, projection.this.this, copy=False) 643 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 644 else projection 645 ) 646 for projection in super()._parse_projections() 647 ] 648 649 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 650 """Applies to SQL Server and Azure SQL Database 651 COMMIT [ { TRAN | TRANSACTION } 652 [ transaction_name | @tran_name_variable ] ] 653 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 654 655 ROLLBACK { TRAN | TRANSACTION } 656 [ transaction_name | @tran_name_variable 657 | savepoint_name | @savepoint_variable ] 658 """ 659 rollback = self._prev.token_type == TokenType.ROLLBACK 660 661 self._match_texts(("TRAN", "TRANSACTION")) 662 this = self._parse_id_var() 663 664 if rollback: 665 return self.expression(exp.Rollback, this=this) 666 667 durability = None 668 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 669 self._match_text_seq("DELAYED_DURABILITY") 670 self._match(TokenType.EQ) 671 672 if self._match_text_seq("OFF"): 673 durability = False 674 else: 675 self._match(TokenType.ON) 676 durability = True 677 678 self._match_r_paren() 679 680 return self.expression(exp.Commit, this=this, durability=durability) 681 682 def _parse_transaction(self) -> exp.Transaction | exp.Command: 683 """Applies to SQL Server and Azure SQL Database 684 BEGIN { TRAN | TRANSACTION } 685 [ { transaction_name | @tran_name_variable } 686 [ WITH MARK [ 'description' ] ] 687 ] 688 """ 689 if self._match_texts(("TRAN", "TRANSACTION")): 690 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 691 if self._match_text_seq("WITH", "MARK"): 692 transaction.set("mark", self._parse_string()) 693 694 return transaction 695 696 return self._parse_as_command(self._prev) 697 698 def _parse_returns(self) -> exp.ReturnsProperty: 699 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 700 returns = super()._parse_returns() 701 returns.set("table", table) 702 return returns 703 704 def _parse_convert( 705 self, strict: bool, safe: t.Optional[bool] = None 706 ) -> t.Optional[exp.Expression]: 707 this = self._parse_types() 708 self._match(TokenType.COMMA) 709 args = [this, *self._parse_csv(self._parse_assignment)] 710 convert = exp.Convert.from_arg_list(args) 711 convert.set("safe", safe) 712 convert.set("strict", strict) 713 return convert 714 715 def _parse_user_defined_function( 716 self, kind: t.Optional[TokenType] = None 717 ) -> t.Optional[exp.Expression]: 718 this = super()._parse_user_defined_function(kind=kind) 719 720 if ( 721 kind == TokenType.FUNCTION 722 or isinstance(this, exp.UserDefinedFunction) 723 or self._match(TokenType.ALIAS, advance=False) 724 ): 725 return this 726 727 if not 
self._match(TokenType.WITH, advance=False): 728 expressions = self._parse_csv(self._parse_function_parameter) 729 else: 730 expressions = None 731 732 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 733 734 def _parse_id_var( 735 self, 736 any_token: bool = True, 737 tokens: t.Optional[t.Collection[TokenType]] = None, 738 ) -> t.Optional[exp.Expression]: 739 is_temporary = self._match(TokenType.HASH) 740 is_global = is_temporary and self._match(TokenType.HASH) 741 742 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 743 if this: 744 if is_global: 745 this.set("global", True) 746 elif is_temporary: 747 this.set("temporary", True) 748 749 return this 750 751 def _parse_create(self) -> exp.Create | exp.Command: 752 create = super()._parse_create() 753 754 if isinstance(create, exp.Create): 755 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 756 if isinstance(table, exp.Table) and table.this.args.get("temporary"): 757 if not create.args.get("properties"): 758 create.set("properties", exp.Properties(expressions=[])) 759 760 create.args["properties"].append("expressions", exp.TemporaryProperty()) 761 762 return create 763 764 def _parse_if(self) -> t.Optional[exp.Expression]: 765 index = self._index 766 767 if self._match_text_seq("OBJECT_ID"): 768 self._parse_wrapped_csv(self._parse_string) 769 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 770 return self._parse_drop(exists=True) 771 self._retreat(index) 772 773 return super()._parse_if() 774 775 def _parse_unique(self) -> exp.UniqueColumnConstraint: 776 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 777 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 778 else: 779 this = self._parse_schema(self._parse_id_var(any_token=False)) 780 781 return self.expression(exp.UniqueColumnConstraint, this=this) 782 783 def _parse_partition(self) -> t.Optional[exp.Partition]: 784 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 785 return None 786 787 def parse_range(): 788 low = self._parse_bitwise() 789 high = self._parse_bitwise() if self._match_text_seq("TO") else None 790 791 return ( 792 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 793 ) 794 795 partition = self.expression( 796 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 797 ) 798 799 self._match_r_paren() 800 801 return partition 802 803 def _parse_declare(self) -> exp.Declare | exp.Command: 804 index = self._index 805 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 806 807 if not expressions or self._curr: 808 self._retreat(index) 809 return self._parse_as_command(self._prev) 810 811 return self.expression(exp.Declare, expressions=expressions) 812 813 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 814 var = self._parse_id_var() 815 if not var: 816 return None 817 818 value = None 819 self._match(TokenType.ALIAS) 820 if self._match(TokenType.TABLE): 821 data_type = self._parse_schema() 822 else: 823 data_type = self._parse_types() 824 if self._match(TokenType.EQ): 825 value = self._parse_bitwise() 826 827 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
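A hedged sketch of driving this parser through the top-level API; parser options such as error_level are forwarded to it, and the T-SQL alias = expression projection is converted into a regular alias (see _parse_projections above):

import sqlglot
from sqlglot.errors import ErrorLevel

# Parse a T-SQL projection of the form alias = expression and re-render it.
ast = sqlglot.parse_one("SELECT total = SUM(x) FROM t", read="tsql", error_level=ErrorLevel.RAISE)
print(ast.sql("tsql"))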
QUERY_MODIFIER_PARSERS =
{<TokenType.MATCH_RECOGNIZE: 'MATCH_RECOGNIZE'>: <function Parser.<lambda>>, <TokenType.PREWHERE: 'PREWHERE'>: <function Parser.<lambda>>, <TokenType.WHERE: 'WHERE'>: <function Parser.<lambda>>, <TokenType.GROUP_BY: 'GROUP_BY'>: <function Parser.<lambda>>, <TokenType.HAVING: 'HAVING'>: <function Parser.<lambda>>, <TokenType.QUALIFY: 'QUALIFY'>: <function Parser.<lambda>>, <TokenType.WINDOW: 'WINDOW'>: <function Parser.<lambda>>, <TokenType.ORDER_BY: 'ORDER_BY'>: <function Parser.<lambda>>, <TokenType.LIMIT: 'LIMIT'>: <function Parser.<lambda>>, <TokenType.FETCH: 'FETCH'>: <function Parser.<lambda>>, <TokenType.OFFSET: 'OFFSET'>: <function Parser.<lambda>>, <TokenType.FOR: 'FOR'>: <function Parser.<lambda>>, <TokenType.LOCK: 'LOCK'>: <function Parser.<lambda>>, <TokenType.TABLE_SAMPLE: 'TABLE_SAMPLE'>: <function Parser.<lambda>>, <TokenType.USING: 'USING'>: <function Parser.<lambda>>, <TokenType.CLUSTER_BY: 'CLUSTER_BY'>: <function Parser.<lambda>>, <TokenType.DISTRIBUTE_BY: 'DISTRIBUTE_BY'>: <function Parser.<lambda>>, <TokenType.SORT_BY: 'SORT_BY'>: <function Parser.<lambda>>, <TokenType.CONNECT_BY: 'CONNECT_BY'>: <function Parser.<lambda>>, <TokenType.START_WITH: 'START_WITH'>: <function Parser.<lambda>>, <TokenType.OPTION: 'OPTION'>: <function TSQL.Parser.<lambda>>}
FUNCTIONS =
{'ABS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Abs'>>, 'ADD_MONTHS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AddMonths'>>, 'ANONYMOUS_AGG_FUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AnonymousAggFunc'>>, 'ANY_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AnyValue'>>, 'APPLY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Apply'>>, 'APPROX_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_COUNT_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxQuantile'>>, 'APPROX_TOP_K': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxTopK'>>, 'ARG_MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'ARGMAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'MAX_BY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'ARG_MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'ARGMIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'MIN_BY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'ARRAY': <function Parser.<lambda>>, 'ARRAY_AGG': <function Parser.<lambda>>, 'ARRAY_ALL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAll'>>, 'ARRAY_ANY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAny'>>, 'ARRAY_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CONSTRUCT_COMPACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConstructCompact'>>, 'ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayContains'>>, 'ARRAY_HAS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayContains'>>, 'ARRAY_CONTAINS_ALL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayContainsAll'>>, 'ARRAY_HAS_ALL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayContainsAll'>>, 'FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_OVERLAPS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayOverlaps'>>, 'ARRAY_SIZE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySize'>>, 'ARRAY_LENGTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySize'>>, 'ARRAY_SORT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySort'>>, 'ARRAY_SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySum'>>, 'ARRAY_TO_STRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayToString'>>, 'ARRAY_JOIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayToString'>>, 'ARRAY_UNION_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUnionAgg'>>, 'ARRAY_UNIQUE_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUniqueAgg'>>, 'AVG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Avg'>>, 'CASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Case'>>, 
'CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Cast'>>, 'CAST_TO_STR_TYPE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CastToStrType'>>, 'CBRT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Cbrt'>>, 'CEIL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CEILING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CHR': <function Parser.<lambda>>, 'CHAR': <function Parser.<lambda>>, 'COALESCE': <function build_coalesce>, 'IFNULL': <function build_coalesce>, 'NVL': <function build_coalesce>, 'COLLATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Collate'>>, 'COLUMNS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Columns'>>, 'COMBINED_AGG_FUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CombinedAggFunc'>>, 'COMBINED_PARAMETERIZED_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CombinedParameterizedAgg'>>, 'CONCAT': <function Parser.<lambda>>, 'CONCAT_WS': <function Parser.<lambda>>, 'CONNECT_BY_ROOT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ConnectByRoot'>>, 'CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Contains'>>, 'CONVERT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Convert'>>, 'CONVERT_TIMEZONE': <function build_convert_timezone>, 'CORR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Corr'>>, 'COUNT': <function TSQL.Parser.<lambda>>, 'COUNT_IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CountIf'>>, 'COUNTIF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CountIf'>>, 'COVAR_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CovarPop'>>, 'COVAR_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CovarSamp'>>, 'CURRENT_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDate'>>, 'CURRENT_DATETIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDatetime'>>, 'CURRENT_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTime'>>, 'CURRENT_TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTimestamp'>>, 'CURRENT_USER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentUser'>>, 'DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Date'>>, 'DATE_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateAdd'>>, 'DATEDIFF': <function _build_date_delta.<locals>._builder>, 'DATE_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATE_FROM_PARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateFromParts'>>, 'DATEFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateFromParts'>>, 'DATE_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateStrToDate'>>, 'DATE_SUB': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateSub'>>, 'DATE_TO_DATE_STR': <function Parser.<lambda>>, 'DATE_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateToDi'>>, 'DATE_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateTrunc'>>, 'DATETIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Datetime'>>, 'DATETIME_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeAdd'>>, 'DATETIME_DIFF': <bound 
method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeDiff'>>, 'DATETIME_SUB': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeSub'>>, 'DATETIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeTrunc'>>, 'DAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Day'>>, 'DAY_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAYOFMONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAY_OF_WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAYOFWEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAYOFWEEK_ISO': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeekIso'>>, 'ISODOW': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeekIso'>>, 'DAY_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DAYOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DECODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Decode'>>, 'DI_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DiToDate'>>, 'ENCODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Encode'>>, 'EXISTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Exists'>>, 'EXP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Exp'>>, 'EXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Explode'>>, 'EXPLODE_OUTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ExplodeOuter'>>, 'EXPLODING_GENERATE_SERIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ExplodingGenerateSeries'>>, 'EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Extract'>>, 'FEATURES_AT_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FeaturesAtTime'>>, 'FIRST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.First'>>, 'FIRST_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FirstValue'>>, 'FLATTEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Flatten'>>, 'FLOOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Floor'>>, 'FROM_BASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase'>>, 'FROM_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase64'>>, 'FROM_ISO8601_TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromISO8601Timestamp'>>, 'GAP_FILL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GapFill'>>, 'GENERATE_DATE_ARRAY': <function Parser.<lambda>>, 'GENERATE_SERIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'GENERATE_TIMESTAMP_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateTimestampArray'>>, 'GREATEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Greatest'>>, 'GROUP_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GroupConcat'>>, 'HEX': <function build_hex>, 'HLL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hll'>>, 'IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'IIF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'INITCAP': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.Initcap'>>, 'INLINE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Inline'>>, 'INT64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Int64'>>, 'IS_INF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsInf'>>, 'ISINF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsInf'>>, 'IS_NAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'ISNAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'J_S_O_N_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArray'>>, 'J_S_O_N_ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayAgg'>>, 'JSON_ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayContains'>>, 'JSONB_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBContains'>>, 'JSONB_EXISTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExists'>>, 'JSONB_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtract'>>, 'JSONB_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtractScalar'>>, 'J_S_O_N_EXISTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExists'>>, 'JSON_EXTRACT': <function build_extract_json_with_path.<locals>._builder>, 'JSON_EXTRACT_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtractArray'>>, 'JSON_EXTRACT_SCALAR': <function build_extract_json_with_path.<locals>._builder>, 'JSON_FORMAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>, 'J_S_O_N_OBJECT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONObject'>>, 'J_S_O_N_OBJECT_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONObjectAgg'>>, 'J_S_O_N_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONTable'>>, 'J_S_O_N_VALUE_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONValueArray'>>, 'LAG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lag'>>, 'LAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Last'>>, 'LAST_DAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastDay'>>, 'LAST_DAY_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastDay'>>, 'LAST_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastValue'>>, 'LEAD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lead'>>, 'LEAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Least'>>, 'LEFT': <function _build_with_arg_as_text.<locals>._parse>, 'LENGTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEN': <function _build_with_arg_as_text.<locals>._parse>, 'LEVENSHTEIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Levenshtein'>>, 'LIST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.List'>>, 'LN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ln'>>, 'LOG': <function build_logarithm>, 'LOGICAL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOLAND_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'LOGICAL_OR': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.LogicalOr'>>, 'BOOL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOLOR_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'LOWER': <function build_lower>, 'LCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'LOWER_HEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LowerHex'>>, 'MD5': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5'>>, 'MD5_DIGEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MAKE_INTERVAL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MakeInterval'>>, 'MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Map'>>, 'MAP_FROM_ENTRIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MapFromEntries'>>, 'MATCH_AGAINST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MatchAgainst'>>, 'MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Max'>>, 'MEDIAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Median'>>, 'MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Min'>>, 'MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Month'>>, 'MONTHS_BETWEEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MonthsBetween'>>, 'NEXT_VALUE_FOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NextValueFor'>>, 'NORMALIZE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Normalize'>>, 'NTH_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NthValue'>>, 'NULLIF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nullif'>>, 'NUMBER_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NumberToStr'>>, 'NVL2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nvl2'>>, 'OBJECT_INSERT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ObjectInsert'>>, 'OPEN_J_S_O_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.OpenJSON'>>, 'OVERLAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Overlay'>>, 'PAD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pad'>>, 'PARAMETERIZED_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParameterizedAgg'>>, 'PARSE_JSON': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParseJSON'>>, 'JSON_PARSE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParseJSON'>>, 'PERCENTILE_CONT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileCont'>>, 'PERCENTILE_DISC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileDisc'>>, 'POSEXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Posexplode'>>, 'POSEXPLODE_OUTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PosexplodeOuter'>>, 'POWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'POW': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'PREDICT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Predict'>>, 'QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Quantile'>>, 'QUARTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Quarter'>>, 'RAND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Rand'>>, 'RANDOM': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.Rand'>>, 'RANDN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Randn'>>, 'RANGE_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RangeN'>>, 'READ_CSV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ReadCSV'>>, 'REDUCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Reduce'>>, 'REGEXP_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpExtract'>>, 'REGEXP_EXTRACT_ALL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpExtractAll'>>, 'REGEXP_I_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpILike'>>, 'REGEXP_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'REGEXP_REPLACE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpReplace'>>, 'REGEXP_SPLIT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpSplit'>>, 'REPEAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Repeat'>>, 'RIGHT': <function _build_with_arg_as_text.<locals>._parse>, 'ROUND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Round'>>, 'ROW_NUMBER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RowNumber'>>, 'SHA': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA1': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA2'>>, 'SAFE_DIVIDE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeDivide'>>, 'SIGN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sign'>>, 'SIGNUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sign'>>, 'SORT_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SortArray'>>, 'SPLIT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Split'>>, 'SPLIT_PART': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SplitPart'>>, 'SQRT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sqrt'>>, 'STANDARD_HASH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StandardHash'>>, 'STAR_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StarMap'>>, 'STARTS_WITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STARTSWITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STDDEV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stddev'>>, 'STDEV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stddev'>>, 'STDDEV_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevPop'>>, 'STDDEV_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevSamp'>>, 'STR_POSITION': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrPosition'>>, 'STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToDate'>>, 'STR_TO_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToMap'>>, 'STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToTime'>>, 'STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToUnix'>>, 'STRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.String'>>, 'STRING_TO_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StringToArray'>>, 'SPLIT_BY_STRING': <bound method 
Func.from_arg_list of <class 'sqlglot.expressions.StringToArray'>>, 'STRUCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Struct'>>, 'STRUCT_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StructExtract'>>, 'STUFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'INSERT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'SUBSTRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Substring'>>, 'SUBSTR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Substring'>>, 'SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sum'>>, 'TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Time'>>, 'TIME_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeAdd'>>, 'TIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeDiff'>>, 'TIME_FROM_PARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeFromParts'>>, 'TIMEFROMPARTS': <function _build_timefromparts>, 'TIME_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToDate'>>, 'TIME_STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToTime'>>, 'TIME_STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToUnix'>>, 'TIME_SUB': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeSub'>>, 'TIME_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToStr'>>, 'TIME_TO_TIME_STR': <function Parser.<lambda>>, 'TIME_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToUnix'>>, 'TIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeTrunc'>>, 'TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Timestamp'>>, 'TIMESTAMP_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampAdd'>>, 'TIMESTAMPDIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampDiff'>>, 'TIMESTAMP_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampDiff'>>, 'TIMESTAMP_FROM_PARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampFromParts'>>, 'TIMESTAMPFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampFromParts'>>, 'TIMESTAMP_SUB': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampSub'>>, 'TIMESTAMP_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampTrunc'>>, 'TO_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToArray'>>, 'TO_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToBase64'>>, 'TO_CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToChar'>>, 'TO_DAYS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToDays'>>, 'TO_DOUBLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToDouble'>>, 'TO_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToMap'>>, 'TO_NUMBER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToNumber'>>, 'TRANSFORM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Transform'>>, 'TRIM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Trim'>>, 'TRY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Try'>>, 'TRY_CAST': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.TryCast'>>, 'TS_OR_DI_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDiToDi'>>, 'TS_OR_DS_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsAdd'>>, 'TS_OR_DS_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsDiff'>>, 'TS_OR_DS_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToDate'>>, 'TS_OR_DS_TO_DATE_STR': <function Parser.<lambda>>, 'TS_OR_DS_TO_DATETIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToDatetime'>>, 'TS_OR_DS_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToTime'>>, 'TS_OR_DS_TO_TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToTimestamp'>>, 'UNHEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Unhex'>>, 'UNIX_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixDate'>>, 'UNIX_SECONDS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixSeconds'>>, 'UNIX_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToStr'>>, 'UNIX_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTime'>>, 'UNIX_TO_TIME_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTimeStr'>>, 'UNNEST': <function Parser.<lambda>>, 'UPPER': <function build_upper>, 'UCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'UUID': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Uuid'>>, 'GEN_RANDOM_UUID': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Uuid'>>, 'GENERATE_UUID': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Uuid'>>, 'UUID_STRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Uuid'>>, 'VAR_MAP': <function build_var_map>, 'VARIANCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VAR_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'VAR_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Week'>>, 'WEEK_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WEEKOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WHEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.When'>>, 'X_M_L_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.XMLTable'>>, 'XOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Xor'>>, 'YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Year'>>, 'ARRAYAGG': <function Parser.<lambda>>, 'GLOB': <function Parser.<lambda>>, 'INSTR': <function Parser.<lambda>>, 'JSON_EXTRACT_PATH_TEXT': <function build_extract_json_with_path.<locals>._builder>, 'LIKE': <function build_like>, 'LOG2': <function Parser.<lambda>>, 'LOG10': <function Parser.<lambda>>, 'LPAD': <function Parser.<lambda>>, 'LEFTPAD': <function Parser.<lambda>>, 'LTRIM': <function Parser.<lambda>>, 'MOD': <function build_mod>, 'RIGHTPAD': <function Parser.<lambda>>, 'RPAD': <function Parser.<lambda>>, 'RTRIM': <function Parser.<lambda>>, 'SCOPE_RESOLUTION': <function 
Parser.<lambda>>, 'TO_HEX': <function build_hex>, 'CHARINDEX': <function TSQL.Parser.<lambda>>, 'COUNT_BIG': <function TSQL.Parser.<lambda>>, 'DATEADD': <function build_date_delta.<locals>._builder>, 'DATENAME': <function _build_formatted_time.<locals>._builder>, 'DATEPART': <function _build_formatted_time.<locals>._builder>, 'DATETIMEFROMPARTS': <function _build_datetimefromparts>, 'EOMONTH': <function _build_eomonth>, 'FORMAT': <function _build_format>, 'GETDATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTimestamp'>>, 'HASHBYTES': <function _build_hashbytes>, 'ISNULL': <function build_coalesce>, 'JSON_QUERY': <function _build_json_query>, 'JSON_VALUE': <function build_extract_json_with_path.<locals>._builder>, 'PARSENAME': <function _build_parsename>, 'REPLICATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Repeat'>>, 'SQUARE': <function TSQL.Parser.<lambda>>, 'SYSDATETIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTimestamp'>>, 'SUSER_NAME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentUser'>>, 'SUSER_SNAME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentUser'>>, 'SYSTEM_USER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentUser'>>}
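The FUNCTIONS mapping above is what turns T-SQL function calls into dialect-agnostic expression nodes at parse time: for example, ISNULL is registered to build_coalesce, GETDATE and SYSDATETIME resolve to exp.CurrentTimestamp, and DATEADD/DATEDIFF go through the date-delta builders. A minimal sketch of the effect (exact output text may vary between sqlglot versions):

    import sqlglot
    from sqlglot import exp

    # ISNULL parses to a Coalesce node, so it transpiles to COALESCE in other dialects.
    print(sqlglot.transpile("SELECT ISNULL(a, 0) FROM t", read="tsql", write="duckdb")[0])

    # GETDATE() parses to exp.CurrentTimestamp, the same node SYSDATETIME() maps to.
    ast = sqlglot.parse_one("SELECT GETDATE()", read="tsql")
    print(ast.find(exp.CurrentTimestamp) is not None)  # True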
PROCEDURE_OPTIONS =
{'ENCRYPTION': (), 'RECOMPILE': (), 'SCHEMABINDING': (), 'NATIVE_COMPILATION': (), 'EXECUTE': ()}
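PROCEDURE_OPTIONS enumerates the WITH options the parser accepts on CREATE PROCEDURE. A hedged sketch, assuming a simple procedure body (the round-tripped SQL shape may differ by version):

    import sqlglot

    # RECOMPILE and SCHEMABINDING appear in PROCEDURE_OPTIONS, so they should parse.
    ast = sqlglot.parse_one(
        "CREATE PROCEDURE dbo.p WITH RECOMPILE, SCHEMABINDING AS SELECT 1 AS x",
        read="tsql",
    )
    print(ast.sql(dialect="tsql"))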
RETURNS_TABLE_TOKENS =
{<TokenType.ROWS: 'ROWS'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.RENAME: 'RENAME'>, <TokenType.SOURCE: 'SOURCE'>, <TokenType.REFRESH: 'REFRESH'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.VAR: 'VAR'>, <TokenType.DETACH: 'DETACH'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.OPERATOR: 'OPERATOR'>, <TokenType.KEEP: 'KEEP'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.COPY: 'COPY'>, <TokenType.FINAL: 'FINAL'>, <TokenType.UNNEST: 'UNNEST'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.ANY: 'ANY'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.ALL: 'ALL'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.KILL: 'KILL'>, <TokenType.END: 'END'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, <TokenType.MODEL: 'MODEL'>, <TokenType.APPLY: 'APPLY'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.USE: 'USE'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.SHOW: 'SHOW'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.FALSE: 'FALSE'>, <TokenType.SOME: 'SOME'>, <TokenType.ANTI: 'ANTI'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.IDENTIFIER: 'IDENTIFIER'>, <TokenType.ATTACH: 'ATTACH'>, <TokenType.SINK: 'SINK'>, <TokenType.TOP: 'TOP'>, <TokenType.VIEW: 'VIEW'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.WAREHOUSE: 'WAREHOUSE'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.LOAD: 'LOAD'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.FILTER: 'FILTER'>, <TokenType.MERGE: 'MERGE'>, <TokenType.INDEX: 'INDEX'>, <TokenType.CASE: 'CASE'>, <TokenType.DELETE: 'DELETE'>, <TokenType.OVERLAPS: 'OVERLAPS'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.SEMI: 'SEMI'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.SEQUENCE: 'SEQUENCE'>, <TokenType.STORAGE_INTEGRATION: 'STORAGE_INTEGRATION'>, <TokenType.DIV: 'DIV'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.CUBE: 'CUBE'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.TAG: 'TAG'>, <TokenType.ASC: 'ASC'>, <TokenType.REPLACE: 'REPLACE'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.ROLLUP: 'ROLLUP'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.LEFT: 'LEFT'>, <TokenType.NATURAL: 'NATURAL'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.DESC: 'DESC'>, <TokenType.RECURSIVE: 'RECURSIVE'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.TRUE: 'TRUE'>, <TokenType.CACHE: 'CACHE'>, <TokenType.ROW: 'ROW'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.SET: 'SET'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.TRUNCATE: 'TRUNCATE'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.ASOF: 'ASOF'>, <TokenType.STREAMLIT: 'STREAMLIT'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.FIRST: 'FIRST'>, <TokenType.IS: 'IS'>, <TokenType.NEXT: 'NEXT'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.FULL: 'FULL'>}
STATEMENT_PARSERS =
{<TokenType.ALTER: 'ALTER'>: <function Parser.<lambda>>, <TokenType.BEGIN: 'BEGIN'>: <function Parser.<lambda>>, <TokenType.CACHE: 'CACHE'>: <function Parser.<lambda>>, <TokenType.COMMENT: 'COMMENT'>: <function Parser.<lambda>>, <TokenType.COMMIT: 'COMMIT'>: <function Parser.<lambda>>, <TokenType.COPY: 'COPY'>: <function Parser.<lambda>>, <TokenType.CREATE: 'CREATE'>: <function Parser.<lambda>>, <TokenType.DELETE: 'DELETE'>: <function Parser.<lambda>>, <TokenType.DESC: 'DESC'>: <function Parser.<lambda>>, <TokenType.DESCRIBE: 'DESCRIBE'>: <function Parser.<lambda>>, <TokenType.DROP: 'DROP'>: <function Parser.<lambda>>, <TokenType.GRANT: 'GRANT'>: <function Parser.<lambda>>, <TokenType.INSERT: 'INSERT'>: <function Parser.<lambda>>, <TokenType.KILL: 'KILL'>: <function Parser.<lambda>>, <TokenType.LOAD: 'LOAD'>: <function Parser.<lambda>>, <TokenType.MERGE: 'MERGE'>: <function Parser.<lambda>>, <TokenType.PIVOT: 'PIVOT'>: <function Parser.<lambda>>, <TokenType.PRAGMA: 'PRAGMA'>: <function Parser.<lambda>>, <TokenType.REFRESH: 'REFRESH'>: <function Parser.<lambda>>, <TokenType.ROLLBACK: 'ROLLBACK'>: <function Parser.<lambda>>, <TokenType.SET: 'SET'>: <function Parser.<lambda>>, <TokenType.TRUNCATE: 'TRUNCATE'>: <function Parser.<lambda>>, <TokenType.UNCACHE: 'UNCACHE'>: <function Parser.<lambda>>, <TokenType.UPDATE: 'UPDATE'>: <function Parser.<lambda>>, <TokenType.USE: 'USE'>: <function Parser.<lambda>>, <TokenType.SEMICOLON: 'SEMICOLON'>: <function Parser.<lambda>>, <TokenType.DECLARE: 'DECLARE'>: <function TSQL.Parser.<lambda>>}
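STATEMENT_PARSERS mostly reuses the base Parser lambdas; the T-SQL-specific entry is TokenType.DECLARE, which lets variable declarations parse into a proper statement rather than an opaque command. A small sketch (the printed node name and SQL are indicative only):

    import sqlglot

    # DECLARE is handled by TSQL.Parser's dedicated statement parser.
    stmt = sqlglot.parse_one("DECLARE @var INT = 1", read="tsql")
    print(type(stmt).__name__)        # e.g. Declare
    print(stmt.sql(dialect="tsql"))   # round-trips as a DECLARE statement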
RANGE_PARSERS =
{<TokenType.AT_GT: 'AT_GT'>: <function binary_range_parser.<locals>._parse_binary_range>, <TokenType.BETWEEN: 'BETWEEN'>: <function Parser.<lambda>>, <TokenType.GLOB: 'GLOB'>: <function binary_range_parser.<locals>._parse_binary_range>, <TokenType.ILIKE: 'ILIKE'>: <function binary_range_parser.<locals>._parse_binary_range>, <TokenType.IN: 'IN'>: <function Parser.<lambda>>, <TokenType.IRLIKE: 'IRLIKE'>: <function binary_range_parser.<locals>._parse_binary_range>, <TokenType.IS: 'IS'>: <function Parser.<lambda>>, <TokenType.LIKE: 'LIKE'>: <function binary_range_parser.<locals>._parse_binary_range>, <TokenType.LT_AT: 'LT_AT'>: <function binary_range_parser.<locals>._parse_binary_range>, <TokenType.OVERLAPS: 'OVERLAPS'>: <function binary_range_parser.<locals>._parse_binary_range>, <TokenType.RLIKE: 'RLIKE'>: <function binary_range_parser.<locals>._parse_binary_range>, <TokenType.SIMILAR_TO: 'SIMILAR_TO'>: <function binary_range_parser.<locals>._parse_binary_range>, <TokenType.FOR: 'FOR'>: <function Parser.<lambda>>, <TokenType.DCOLON: 'DCOLON'>: <function TSQL.Parser.<lambda>>}
COLUMN_OPERATORS =
{<TokenType.DOT: 'DOT'>: None, <TokenType.DCOLON: 'DCOLON'>: <function TSQL.Parser.<lambda>>, <TokenType.ARROW: 'ARROW'>: <function Parser.<lambda>>, <TokenType.DARROW: 'DARROW'>: <function Parser.<lambda>>, <TokenType.HASH_ARROW: 'HASH_ARROW'>: <function Parser.<lambda>>, <TokenType.DHASH_ARROW: 'DHASH_ARROW'>: <function Parser.<lambda>>, <TokenType.PLACEHOLDER: 'PLACEHOLDER'>: <function Parser.<lambda>>}
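Both RANGE_PARSERS and COLUMN_OPERATORS override TokenType.DCOLON: in T-SQL, :: is the scope-resolution operator (static method calls on CLR types such as GEOGRAPHY) rather than a cast, and the Generator below renders it back via scope_resolution. A hedged sketch:

    import sqlglot

    # '::' is kept as scope resolution when reading and writing T-SQL.
    sql = sqlglot.transpile(
        "SELECT GEOGRAPHY::Point(47.65, -122.36, 4326)", read="tsql", write="tsql"
    )[0]
    print(sql)  # expected to preserve the GEOGRAPHY::Point(...) call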
ID_VAR_TOKENS =
{<TokenType.ROWS: 'ROWS'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.RENAME: 'RENAME'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.SOURCE: 'SOURCE'>, <TokenType.UDECIMAL: 'UDECIMAL'>, <TokenType.LIST: 'LIST'>, <TokenType.TIME: 'TIME'>, <TokenType.INET: 'INET'>, <TokenType.REFRESH: 'REFRESH'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.VAR: 'VAR'>, <TokenType.DETACH: 'DETACH'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.TABLE: 'TABLE'>, <TokenType.OPERATOR: 'OPERATOR'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.STRAIGHT_JOIN: 'STRAIGHT_JOIN'>, <TokenType.DATE: 'DATE'>, <TokenType.BINARY: 'BINARY'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.ENUM: 'ENUM'>, <TokenType.KEEP: 'KEEP'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.YEAR: 'YEAR'>, <TokenType.COPY: 'COPY'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.MAP: 'MAP'>, <TokenType.BIT: 'BIT'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.FINAL: 'FINAL'>, <TokenType.VECTOR: 'VECTOR'>, <TokenType.UNNEST: 'UNNEST'>, <TokenType.BPCHAR: 'BPCHAR'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.ANY: 'ANY'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.DATE32: 'DATE32'>, <TokenType.ALL: 'ALL'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.KILL: 'KILL'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.END: 'END'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, <TokenType.MODEL: 'MODEL'>, <TokenType.NESTED: 'NESTED'>, <TokenType.APPLY: 'APPLY'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.USE: 'USE'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.SHOW: 'SHOW'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.FALSE: 'FALSE'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.SOME: 'SOME'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.ANTI: 'ANTI'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.IDENTIFIER: 'IDENTIFIER'>, <TokenType.IPV4: 'IPV4'>, <TokenType.UINT256: 'UINT256'>, <TokenType.ATTACH: 'ATTACH'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.SINK: 'SINK'>, <TokenType.TOP: 'TOP'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.DECIMAL128: 'DECIMAL128'>, <TokenType.SIMPLEAGGREGATEFUNCTION: 'SIMPLEAGGREGATEFUNCTION'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.SUPER: 'SUPER'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.VIEW: 'VIEW'>, <TokenType.CHAR: 'CHAR'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.WAREHOUSE: 'WAREHOUSE'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.LOAD: 'LOAD'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.TEXT: 'TEXT'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.TINYTEXT: 'TINYTEXT'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.MONEY: 'MONEY'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.DEFAULT: 
'DEFAULT'>, <TokenType.FILTER: 'FILTER'>, <TokenType.MERGE: 'MERGE'>, <TokenType.INDEX: 'INDEX'>, <TokenType.TINYBLOB: 'TINYBLOB'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.CASE: 'CASE'>, <TokenType.DECIMAL32: 'DECIMAL32'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.TIMESTAMP_S: 'TIMESTAMP_S'>, <TokenType.DELETE: 'DELETE'>, <TokenType.MULTIPOLYGON: 'MULTIPOLYGON'>, <TokenType.RING: 'RING'>, <TokenType.DECIMAL256: 'DECIMAL256'>, <TokenType.NAME: 'NAME'>, <TokenType.OVERLAPS: 'OVERLAPS'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.SEMI: 'SEMI'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.SEQUENCE: 'SEQUENCE'>, <TokenType.STORAGE_INTEGRATION: 'STORAGE_INTEGRATION'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.DIV: 'DIV'>, <TokenType.POLYGON: 'POLYGON'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.NULL: 'NULL'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.IPV6: 'IPV6'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.CUBE: 'CUBE'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.UINT128: 'UINT128'>, <TokenType.TIMESTAMPNTZ: 'TIMESTAMPNTZ'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.MULTILINESTRING: 'MULTILINESTRING'>, <TokenType.TAG: 'TAG'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.UINT: 'UINT'>, <TokenType.ASC: 'ASC'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.TIMESTAMP_NS: 'TIMESTAMP_NS'>, <TokenType.AGGREGATEFUNCTION: 'AGGREGATEFUNCTION'>, <TokenType.REPLACE: 'REPLACE'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.TIMESTAMP_MS: 'TIMESTAMP_MS'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.LEFT: 'LEFT'>, <TokenType.NATURAL: 'NATURAL'>, <TokenType.ROLLUP: 'ROLLUP'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.DESC: 'DESC'>, <TokenType.RECURSIVE: 'RECURSIVE'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.TRUE: 'TRUE'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.TDIGEST: 'TDIGEST'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.CACHE: 'CACHE'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.ROW: 'ROW'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.SET: 'SET'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.INT: 'INT'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.TRUNCATE: 'TRUNCATE'>, <TokenType.POINT: 'POINT'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.JSON: 'JSON'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.UUID: 'UUID'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.RANGE: 'RANGE'>, <TokenType.ASOF: 'ASOF'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.XML: 'XML'>, <TokenType.INT256: 'INT256'>, <TokenType.DECIMAL64: 'DECIMAL64'>, <TokenType.STREAMLIT: 'STREAMLIT'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.FIRST: 'FIRST'>, <TokenType.INT128: 'INT128'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.UMEDIUMINT: 'UMEDIUMINT'>, <TokenType.IS: 'IS'>, <TokenType.JSONB: 'JSONB'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.LINESTRING: 'LINESTRING'>, 
<TokenType.NEXT: 'NEXT'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.FULL: 'FULL'>}
TABLE_ALIAS_TOKENS =
{<TokenType.ROWS: 'ROWS'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.RENAME: 'RENAME'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.SOURCE: 'SOURCE'>, <TokenType.UDECIMAL: 'UDECIMAL'>, <TokenType.LIST: 'LIST'>, <TokenType.TIME: 'TIME'>, <TokenType.INET: 'INET'>, <TokenType.REFRESH: 'REFRESH'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.VAR: 'VAR'>, <TokenType.DETACH: 'DETACH'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.TABLE: 'TABLE'>, <TokenType.OPERATOR: 'OPERATOR'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.STRAIGHT_JOIN: 'STRAIGHT_JOIN'>, <TokenType.DATE: 'DATE'>, <TokenType.BINARY: 'BINARY'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.ENUM: 'ENUM'>, <TokenType.KEEP: 'KEEP'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.YEAR: 'YEAR'>, <TokenType.COPY: 'COPY'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.MAP: 'MAP'>, <TokenType.BIT: 'BIT'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.FINAL: 'FINAL'>, <TokenType.VECTOR: 'VECTOR'>, <TokenType.UNNEST: 'UNNEST'>, <TokenType.BPCHAR: 'BPCHAR'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.ANY: 'ANY'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.DATE32: 'DATE32'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.ALL: 'ALL'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.KILL: 'KILL'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.END: 'END'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, <TokenType.MODEL: 'MODEL'>, <TokenType.NESTED: 'NESTED'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.USE: 'USE'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.SHOW: 'SHOW'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.FALSE: 'FALSE'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.SOME: 'SOME'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.ANTI: 'ANTI'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.IDENTIFIER: 'IDENTIFIER'>, <TokenType.IPV4: 'IPV4'>, <TokenType.UINT256: 'UINT256'>, <TokenType.ATTACH: 'ATTACH'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.SINK: 'SINK'>, <TokenType.TOP: 'TOP'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.DECIMAL128: 'DECIMAL128'>, <TokenType.SIMPLEAGGREGATEFUNCTION: 'SIMPLEAGGREGATEFUNCTION'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.SUPER: 'SUPER'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.VIEW: 'VIEW'>, <TokenType.CHAR: 'CHAR'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.WAREHOUSE: 'WAREHOUSE'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.LOAD: 'LOAD'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.TEXT: 'TEXT'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.TINYTEXT: 'TINYTEXT'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.MONEY: 'MONEY'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.FILTER: 'FILTER'>, <TokenType.MERGE: 'MERGE'>, <TokenType.INDEX: 'INDEX'>, <TokenType.TINYBLOB: 
'TINYBLOB'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.CASE: 'CASE'>, <TokenType.DECIMAL32: 'DECIMAL32'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.TIMESTAMP_S: 'TIMESTAMP_S'>, <TokenType.DELETE: 'DELETE'>, <TokenType.MULTIPOLYGON: 'MULTIPOLYGON'>, <TokenType.RING: 'RING'>, <TokenType.DECIMAL256: 'DECIMAL256'>, <TokenType.NAME: 'NAME'>, <TokenType.OVERLAPS: 'OVERLAPS'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.SEMI: 'SEMI'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.SEQUENCE: 'SEQUENCE'>, <TokenType.STORAGE_INTEGRATION: 'STORAGE_INTEGRATION'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.DIV: 'DIV'>, <TokenType.POLYGON: 'POLYGON'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.NULL: 'NULL'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.IPV6: 'IPV6'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.CUBE: 'CUBE'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.UINT128: 'UINT128'>, <TokenType.TIMESTAMPNTZ: 'TIMESTAMPNTZ'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.MULTILINESTRING: 'MULTILINESTRING'>, <TokenType.TAG: 'TAG'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.UINT: 'UINT'>, <TokenType.ASC: 'ASC'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.TIMESTAMP_NS: 'TIMESTAMP_NS'>, <TokenType.AGGREGATEFUNCTION: 'AGGREGATEFUNCTION'>, <TokenType.REPLACE: 'REPLACE'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.TIMESTAMP_MS: 'TIMESTAMP_MS'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.ROLLUP: 'ROLLUP'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.DESC: 'DESC'>, <TokenType.RECURSIVE: 'RECURSIVE'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.TRUE: 'TRUE'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.TDIGEST: 'TDIGEST'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.CACHE: 'CACHE'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.ROW: 'ROW'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.SET: 'SET'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.INT: 'INT'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.TRUNCATE: 'TRUNCATE'>, <TokenType.POINT: 'POINT'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.JSON: 'JSON'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.UUID: 'UUID'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.RANGE: 'RANGE'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.XML: 'XML'>, <TokenType.INT256: 'INT256'>, <TokenType.DECIMAL64: 'DECIMAL64'>, <TokenType.STREAMLIT: 'STREAMLIT'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.FIRST: 'FIRST'>, <TokenType.INT128: 'INT128'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.UMEDIUMINT: 'UMEDIUMINT'>, <TokenType.IS: 'IS'>, <TokenType.JSONB: 'JSONB'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.LINESTRING: 'LINESTRING'>, <TokenType.NEXT: 'NEXT'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.DATETIME: 'DATETIME'>}
SET_TRIE: Dict =
{'GLOBAL': {0: True}, 'LOCAL': {0: True}, 'SESSION': {0: True}, 'TRANSACTION': {0: True}}
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- INTERVAL_VARS
- ALIAS_TOKENS
- ARRAY_CONSTRUCTORS
- COMMENT_TABLE_ALIAS_TOKENS
- UPDATE_ALIAS_TOKENS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- NO_PAREN_FUNCTION_PARSERS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- FUNCTION_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- NULL_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ODBC_DATETIME_LITERALS
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- OPERATION_MODIFIERS
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- SET_OP_MODIFIERS
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- errors
- sql
class Generator(generator.Generator):
    LIMIT_IS_TOP = True
    QUERY_HINTS = False
    RETURNING_END = False
    NVL2_SUPPORTED = False
    ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
    LIMIT_FETCH = "FETCH"
    COMPUTED_COLUMN_WITH_TYPE = False
    CTE_RECURSIVE_KEYWORD_REQUIRED = False
    ENSURE_BOOLS = True
    NULL_ORDERING_SUPPORTED = None
    SUPPORTS_SINGLE_ARG_CONCAT = False
    TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
    SUPPORTS_SELECT_INTO = True
    JSON_PATH_BRACKETED_KEY_SUPPORTED = False
    SUPPORTS_TO_NUMBER = False
    SET_OP_MODIFIERS = False
    COPY_PARAMS_EQ_REQUIRED = True
    PARSE_JSON_NAME = None
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False

    EXPRESSIONS_WITHOUT_NESTED_CTES = {
        exp.Create,
        exp.Delete,
        exp.Insert,
        exp.Intersect,
        exp.Except,
        exp.Merge,
        exp.Select,
        exp.Subquery,
        exp.Union,
        exp.Update,
    }

    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BOOLEAN: "BIT",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DATETIME: "DATETIME2",
        exp.DataType.Type.DOUBLE: "FLOAT",
        exp.DataType.Type.INT: "INTEGER",
        exp.DataType.Type.ROWVERSION: "ROWVERSION",
        exp.DataType.Type.TEXT: "VARCHAR(MAX)",
        exp.DataType.Type.TIMESTAMP: "DATETIME2",
        exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
        exp.DataType.Type.UTINYINT: "TINYINT",
        exp.DataType.Type.VARIANT: "SQL_VARIANT",
    }

    TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
    TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.AnyValue: any_value_to_max_sql,
        exp.ArrayToString: rename_func("STRING_AGG"),
        exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.DateDiff: date_delta_sql("DATEDIFF"),
        exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
        exp.CurrentDate: rename_func("GETDATE"),
        exp.CurrentTimestamp: rename_func("GETDATE"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.Extract: rename_func("DATEPART"),
        exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
        exp.GroupConcat: _string_agg_sql,
        exp.If: rename_func("IIF"),
        exp.JSONExtract: _json_extract_sql,
        exp.JSONExtractScalar: _json_extract_sql,
        exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
        exp.Ln: rename_func("LOG"),
        exp.Max: max_or_greatest,
        exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
        exp.Min: min_or_least,
        exp.NumberToStr: _format_sql,
        exp.Repeat: rename_func("REPLICATE"),
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_distinct_on,
                transforms.eliminate_semi_and_anti_joins,
                transforms.eliminate_qualify,
                transforms.unnest_generate_date_array_using_recursive_cte,
            ]
        ),
        exp.Stddev: rename_func("STDEV"),
        exp.StrPosition: lambda self, e: self.func(
            "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position")
        ),
        exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
        exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
        exp.SHA2: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
        ),
        exp.TemporaryProperty: lambda self, e: "",
        exp.TimeStrToTime: _timestrtotime_sql,
        exp.TimeToStr: _format_sql,
        exp.Trim: trim_sql,
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
    }

    TRANSFORMS.pop(exp.ReturnsProperty)

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    def scope_resolution(self, rhs: str, scope_name: str) -> str:
        return f"{scope_name}::{rhs}"

    def select_sql(self, expression: exp.Select) -> str:
        if expression.args.get("offset"):
            if not expression.args.get("order"):
                # ORDER BY is required in order to use OFFSET in a query, so we use
                # a noop order by, since we don't really care about the order.
                # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            limit = expression.args.get("limit")
            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                # we replace here because otherwise TOP would be generated in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

        return super().select_sql(expression)

    def convert_sql(self, expression: exp.Convert) -> str:
        name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
        return self.func(
            name, expression.this, expression.expression, expression.args.get("style")
        )

    def queryoption_sql(self, expression: exp.QueryOption) -> str:
        option = self.sql(expression, "this")
        value = self.sql(expression, "expression")
        if value:
            optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
            return f"{option} {optional_equal_sign}{value}"
        return option

    def lateral_op(self, expression: exp.Lateral) -> str:
        cross_apply = expression.args.get("cross_apply")
        if cross_apply is True:
            return "CROSS APPLY"
        if cross_apply is False:
            return "OUTER APPLY"

        # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
        self.unsupported("LATERAL clause is not supported.")
        return "LATERAL"

    def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
        this = expression.this
        split_count = len(this.name.split("."))
        delimiter = expression.args.get("delimiter")
        part_index = expression.args.get("part_index")

        if (
            not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
            or (delimiter and delimiter.name != ".")
            or not part_index
            or split_count > 4
        ):
            self.unsupported(
                "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
            )
            return ""

        return self.func(
            "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
        )

    def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

        if expression.args.get("fractions") is None:
            expression.set("fractions", exp.Literal.number(0))
        if expression.args.get("precision") is None:
            expression.set("precision", exp.Literal.number(0))

        return rename_func("TIMEFROMPARTS")(self, expression)

    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
        zone = expression.args.get("zone")
        if zone is not None:
            zone.pop()
            self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

        if expression.args.get("milli") is None:
            expression.set("milli", exp.Literal.number(0))

        return rename_func("DATETIMEFROMPARTS")(self, expression)

    def setitem_sql(self, expression: exp.SetItem) -> str:
        this = expression.this
        if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
            # T-SQL does not use '=' in SET command, except when the LHS is a variable.
            return f"{self.sql(this.left)} {self.sql(this.right)}"

        return super().setitem_sql(expression)

    def boolean_sql(self, expression: exp.Boolean) -> str:
        if type(expression.parent) in BIT_TYPES or isinstance(
            expression.find_ancestor(exp.Values, exp.Select), exp.Values
        ):
            return "1" if expression.this else "0"

        return "(1 = 1)" if expression.this else "(1 = 0)"

    def is_sql(self, expression: exp.Is) -> str:
        if isinstance(expression.expression, exp.Boolean):
            return self.binary(expression, "=")
        return self.binary(expression, "IS")

    def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
        sql = self.sql(expression, "this")
        properties = expression.args.get("properties")

        if sql[:1] != "#" and any(
            isinstance(prop, exp.TemporaryProperty)
            for prop in (properties.expressions if properties else [])
        ):
            sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

        return sql

    def create_sql(self, expression: exp.Create) -> str:
        kind = expression.kind
        exists = expression.args.pop("exists", None)

        like_property = expression.find(exp.LikeProperty)
        if like_property:
            ctas_expression = like_property.this
        else:
            ctas_expression = expression.expression

        if kind == "VIEW":
            expression.this.set("catalog", None)
            with_ = expression.args.get("with")
            if ctas_expression and with_:
                # We've already preprocessed the Create expression to bubble up any nested CTEs,
                # but CREATE VIEW actually requires the WITH clause to come after it so we need
                # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                ctas_expression.set("with", with_.pop())

        sql = super().create_sql(expression)

        table = expression.find(exp.Table)

        # Convert CTAS statement to SELECT .. INTO ..
        if kind == "TABLE" and ctas_expression:
            if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                ctas_expression = ctas_expression.subquery()

            select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
            select_into.set("into", exp.Into(this=table))

            if like_property:
                select_into.limit(0, copy=False)

            sql = self.sql(select_into)

        if exists:
            identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
            sql_with_ctes = self.prepend_ctes(expression, sql)
            sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
            if kind == "SCHEMA":
                return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})"""
            elif kind == "TABLE":
                assert table
                where = exp.and_(
                    exp.column("table_name").eq(table.name),
                    exp.column("table_schema").eq(table.db) if table.db else None,
                    exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
                )
                return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})"""
            elif kind == "INDEX":
                index = self.sql(exp.Literal.string(expression.this.text("this")))
                return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
        elif expression.args.get("replace"):
            sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

        return self.prepend_ctes(expression, sql)

    def count_sql(self, expression: exp.Count) -> str:
        func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
        return rename_func(func_name)(self, expression)

    def offset_sql(self, expression: exp.Offset) -> str:
        return f"{super().offset_sql(expression)} ROWS"

    def version_sql(self, expression: exp.Version) -> str:
        name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
        this = f"FOR {name}"
        expr = expression.expression
        kind = expression.text("kind")
        if kind in ("FROM", "BETWEEN"):
            args = expr.expressions
            sep = "TO" if kind == "FROM" else "AND"
            expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
        else:
            expr_sql = self.sql(expr)

        expr_sql = f" {expr_sql}" if expr_sql else ""
        return f"{this} {kind}{expr_sql}"

    def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
        table = expression.args.get("table")
        table = f"{table} " if table else ""
        return f"RETURNS {table}{self.sql(expression, 'this')}"

    def returning_sql(self, expression: exp.Returning) -> str:
        into = self.sql(expression, "into")
        into = self.seg(f"INTO {into}") if into else ""
        return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

    def transaction_sql(self, expression: exp.Transaction) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        mark = self.sql(expression, "mark")
        mark = f" WITH MARK {mark}" if mark else ""
        return f"BEGIN TRANSACTION{this}{mark}"

    def commit_sql(self, expression: exp.Commit) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        durability = expression.args.get("durability")
        durability = (
            f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
            if durability is not None
            else ""
        )
        return f"COMMIT TRANSACTION{this}{durability}"

    def rollback_sql(self, expression: exp.Rollback) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"ROLLBACK TRANSACTION{this}"

    def identifier_sql(self, expression: exp.Identifier) -> str:
        identifier = super().identifier_sql(expression)

        if expression.args.get("global"):
            identifier = f"##{identifier}"
        elif expression.args.get("temporary"):
            identifier = f"#{identifier}"

        return identifier

    def constraint_sql(self, expression: exp.Constraint) -> str:
        this = self.sql(expression, "this")
        expressions = self.expressions(expression, flat=True, sep=" ")
        return f"CONSTRAINT {this} {expressions}"

    def length_sql(self, expression: exp.Length) -> str:
        return self._uncast_text(expression, "LEN")

    def right_sql(self, expression: exp.Right) -> str:
        return self._uncast_text(expression, "RIGHT")

    def left_sql(self, expression: exp.Left) -> str:
        return self._uncast_text(expression, "LEFT")

    def _uncast_text(self, expression: exp.Expression, name: str) -> str:
        this = expression.this
        if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
            this_sql = self.sql(this, "this")
        else:
            this_sql = self.sql(this)
        expression_sql = self.sql(expression, "expression")
        return self.func(name, this_sql, expression_sql if expression_sql else None)

    def partition_sql(self, expression: exp.Partition) -> str:
        return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

    def alter_sql(self, expression: exp.Alter) -> str:
        action = seq_get(expression.args.get("actions") or [], 0)
        if isinstance(action, exp.AlterRename):
            return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
        return super().alter_sql(expression)

    def drop_sql(self, expression: exp.Drop) -> str:
        if expression.args["kind"]