Edit on GitHub

sqlglot.generators.snowflake

   1from __future__ import annotations
   2
   3import typing as t
   4
   5from sqlglot import exp, generator, transforms
   6from sqlglot.dialects.dialect import (
   7    array_append_sql,
   8    array_concat_sql,
   9    date_delta_sql,
  10    datestrtodate_sql,
  11    groupconcat_sql,
  12    if_sql,
  13    inline_array_sql,
  14    map_date_part,
  15    max_or_greatest,
  16    min_or_least,
  17    no_make_interval_sql,
  18    no_timestamp_sql,
  19    rename_func,
  20    strposition_sql,
  21    timestampdiff_sql,
  22    timestamptrunc_sql,
  23    timestrtotime_sql,
  24    unit_to_str,
  25    var_map_sql,
  26)
  27from sqlglot.generator import unsupported_args
  28from sqlglot.helper import find_new_name, flatten, seq_get
  29from sqlglot.optimizer.scope import build_scope, find_all_in_scope
  30from sqlglot.parsers.snowflake import (
  31    RANKING_WINDOW_FUNCTIONS_WITH_FRAME,
  32    TIMESTAMP_TYPES,
  33    SnowflakeParser,
  34    build_object_construct,
  35)
  36from sqlglot.tokens import TokenType
  37from collections import defaultdict
  38
  39if t.TYPE_CHECKING:
  40    from sqlglot._typing import E
  41
  42
  43def _build_datediff(args: list) -> exp.DateDiff:
  44    return exp.DateDiff(
  45        this=seq_get(args, 2),
  46        expression=seq_get(args, 1),
  47        unit=map_date_part(seq_get(args, 0)),
  48        date_part_boundary=True,
  49    )
  50
  51
  52def _build_date_time_add(expr_type: type[E]) -> t.Callable[[list], E]:
  53    def _builder(args: list) -> E:
  54        return expr_type(
  55            this=seq_get(args, 2),
  56            expression=seq_get(args, 1),
  57            unit=map_date_part(seq_get(args, 0)),
  58        )
  59
  60    return _builder
  61
  62
  63def _regexpilike_sql(self: SnowflakeGenerator, expression: exp.RegexpILike) -> str:
  64    flag = expression.text("flag")
  65
  66    if "i" not in flag:
  67        flag += "i"
  68
  69    return self.func(
  70        "REGEXP_LIKE", expression.this, expression.expression, exp.Literal.string(flag)
  71    )
  72
  73
  74def _unqualify_pivot_columns(expression: exp.Expr) -> exp.Expr:
  75    """
  76    Snowflake doesn't allow columns referenced in UNPIVOT to be qualified,
  77    so we need to unqualify them. Same goes for ANY ORDER BY <column>.
  78
  79    Example:
  80        >>> from sqlglot import parse_one
  81        >>> expr = parse_one("SELECT * FROM m_sales UNPIVOT(sales FOR month IN (m_sales.jan, feb, mar, april))")
  82        >>> print(_unqualify_pivot_columns(expr).sql(dialect="snowflake"))
  83        SELECT * FROM m_sales UNPIVOT(sales FOR month IN (jan, feb, mar, april))
  84    """
  85    if isinstance(expression, exp.Pivot):
  86        if expression.unpivot:
  87            expression = transforms.unqualify_columns(expression)
  88        else:
  89            for field in expression.fields:
  90                field_expr = seq_get(field.expressions if field else [], 0)
  91
  92                if isinstance(field_expr, exp.PivotAny):
  93                    unqualified_field_expr = transforms.unqualify_columns(field_expr)
  94                    t.cast(exp.Expr, field).set("expressions", unqualified_field_expr, 0)
  95
  96    return expression
  97
  98
  99def _flatten_structured_types_unless_iceberg(expression: exp.Expr) -> exp.Expr:
 100    assert isinstance(expression, exp.Create)
 101
 102    def _flatten_structured_type(expression: exp.Expr) -> exp.Expr:
 103        if isinstance(expression, exp.DataType) and expression.this in exp.DataType.NESTED_TYPES:
 104            expression.set("expressions", None)
 105        return expression
 106
 107    props = expression.args.get("properties")
 108    if isinstance(expression.this, exp.Schema) and not (props and props.find(exp.IcebergProperty)):
 109        for schema_expression in expression.this.expressions:
 110            if isinstance(schema_expression, exp.ColumnDef):
 111                column_type = schema_expression.kind
 112                if isinstance(column_type, exp.DataType):
 113                    column_type.transform(_flatten_structured_type, copy=False)
 114
 115    return expression
 116
 117
def _unnest_generate_date_array(unnest: exp.Unnest) -> None:
    """Rewrite UNNEST(GENERATE_DATE_ARRAY(...)) into Snowflake-compatible SQL.

    The date array is replaced by an ARRAY_GENERATE_RANGE integer sequence
    (sized via DATEDIFF) whose values are added to the start date with DATEADD.
    Mutates the tree around `unnest` in place and returns None.
    """
    generate_date_array = unnest.expressions[0]
    start = generate_date_array.args.get("start")
    end = generate_date_array.args.get("end")
    step = generate_date_array.args.get("step")

    # Only handle bounded ranges with a single-unit INTERVAL step (e.g. INTERVAL 1 DAY);
    # anything else is left untouched.
    if not start or not end or not isinstance(step, exp.Interval) or step.name != "1":
        return

    unit = step.args.get("unit")

    unnest_alias = unnest.args.get("alias")
    if unnest_alias:
        unnest_alias = unnest_alias.copy()
        sequence_value_name = seq_get(unnest_alias.columns, 0) or "value"
    else:
        sequence_value_name = "value"

    # We'll add the next sequence value to the starting date and project the result
    date_add = _build_date_time_add(exp.DateAdd)(
        [unit, exp.cast(sequence_value_name, "int"), exp.cast(start, "date")]
    )

    # We use DATEDIFF to compute the number of sequence values needed
    number_sequence = SnowflakeParser.FUNCTIONS["ARRAY_GENERATE_RANGE"](
        [exp.Literal.number(0), _build_datediff([unit, start, end]) + 1]
    )

    unnest.set("expressions", [number_sequence])

    unnest_parent = unnest.parent
    if isinstance(unnest_parent, exp.Join):
        select = unnest_parent.parent
        if isinstance(select, exp.Select):
            # `sequence_value_name` may be a plain string or an Identifier node.
            replace_column_name = (
                sequence_value_name
                if isinstance(sequence_value_name, str)
                else sequence_value_name.name
            )

            # Re-point every reference to the unnested column at the DATEADD projection;
            # only top-level projections keep the original name via an alias.
            scope = build_scope(select)
            if scope:
                for column in scope.columns:
                    if column.name.lower() == replace_column_name.lower():
                        column.replace(
                            date_add.as_(replace_column_name)
                            if isinstance(column.parent, exp.Select)
                            else date_add
                        )

            # Convert the joined UNNEST into a LATERAL join over the sequence.
            lateral = exp.Lateral(this=unnest_parent.this.pop())
            unnest_parent.replace(exp.Join(this=lateral))
    else:
        # FROM-clause case: wrap the sequence UNNEST in a subquery that projects
        # the computed dates under the original alias.
        unnest.replace(
            exp.select(date_add.as_(sequence_value_name))
            .from_(unnest.copy())
            .subquery(unnest_alias)
        )
 176
 177
 178def _transform_generate_date_array(expression: exp.Expr) -> exp.Expr:
 179    if isinstance(expression, exp.Select):
 180        for generate_date_array in expression.find_all(exp.GenerateDateArray):
 181            parent = generate_date_array.parent
 182
 183            # If GENERATE_DATE_ARRAY is used directly as an array (e.g passed into ARRAY_LENGTH), the transformed Snowflake
 184            # query is the following (it'll be unnested properly on the next iteration due to copy):
 185            # SELECT ref(GENERATE_DATE_ARRAY(...)) -> SELECT ref((SELECT ARRAY_AGG(*) FROM UNNEST(GENERATE_DATE_ARRAY(...))))
 186            if not isinstance(parent, exp.Unnest):
 187                unnest = exp.Unnest(expressions=[generate_date_array.copy()])
 188                generate_date_array.replace(
 189                    exp.select(exp.ArrayAgg(this=exp.Star())).from_(unnest).subquery()
 190                )
 191
 192            if (
 193                isinstance(parent, exp.Unnest)
 194                and isinstance(parent.parent, (exp.From, exp.Join))
 195                and len(parent.expressions) == 1
 196            ):
 197                _unnest_generate_date_array(parent)
 198
 199    return expression
 200
 201
 202def _regexpextract_sql(
 203    self: SnowflakeGenerator, expression: exp.RegexpExtract | exp.RegexpExtractAll
 204) -> str:
 205    # Other dialects don't support all of the following parameters, so we need to
 206    # generate default values as necessary to ensure the transpilation is correct
 207    group = expression.args.get("group")
 208
 209    # To avoid generating all these default values, we set group to None if
 210    # it's 0 (also default value) which doesn't trigger the following chain
 211    if group and group.name == "0":
 212        group = None
 213
 214    parameters = expression.args.get("parameters") or (group and exp.Literal.string("c"))
 215    occurrence = expression.args.get("occurrence") or (parameters and exp.Literal.number(1))
 216    position = expression.args.get("position") or (occurrence and exp.Literal.number(1))
 217
 218    return self.func(
 219        "REGEXP_SUBSTR" if isinstance(expression, exp.RegexpExtract) else "REGEXP_SUBSTR_ALL",
 220        expression.this,
 221        expression.expression,
 222        position,
 223        occurrence,
 224        parameters,
 225        group,
 226    )
 227
 228
 229def _json_extract_value_array_sql(
 230    self: SnowflakeGenerator, expression: exp.JSONValueArray | exp.JSONExtractArray
 231) -> str:
 232    json_extract = exp.JSONExtract(this=expression.this, expression=expression.expression)
 233    ident = exp.to_identifier("x")
 234
 235    if isinstance(expression, exp.JSONValueArray):
 236        this: exp.Expr = exp.cast(ident, to=exp.DType.VARCHAR)
 237    else:
 238        this = exp.ParseJSON(this=f"TO_JSON({ident})")
 239
 240    transform_lambda = exp.Lambda(expressions=[ident], this=this)
 241
 242    return self.func("TRANSFORM", json_extract, transform_lambda)
 243
 244
def _qualify_unnested_columns(expression: exp.Expr) -> exp.Expr:
    """Qualify bare columns that refer to UNNEST output with the unnest's alias.

    Used when transpiling (e.g. BigQuery-style) UNNEST to Snowflake. Falls back
    to returning an unmodified copy of the input when an unnest alias cannot be
    resolved to an Identifier.
    """
    if isinstance(expression, exp.Select):
        scope = build_scope(expression)
        if not scope:
            return expression

        unnests = list(scope.find_all(exp.Unnest))

        if not unnests:
            return expression

        taken_source_names = set(scope.sources)
        # Maps lowercased column name -> the unnest identifier that provides it.
        column_source: dict[str, exp.Identifier] = {}
        unnest_to_identifier: dict[exp.Unnest, exp.Identifier] = {}

        unnest_identifier: exp.Identifier | None = None
        # Kept so we can bail out without side effects if qualification fails below.
        orig_expression = expression.copy()

        for unnest in unnests:
            # Only unnests acting as table sources (FROM/JOIN) are qualified.
            if not isinstance(unnest.parent, (exp.From, exp.Join)):
                continue

            # Try to infer column names produced by an unnest operator. This is only possible
            # when we can peek into the (statically known) contents of the unnested value.
            unnest_columns: set[str] = set()
            for unnest_expr in unnest.expressions:
                if not isinstance(unnest_expr, exp.Array):
                    continue

                for array_expr in unnest_expr.expressions:
                    # Only a struct literal whose fields are all key := value pairs
                    # exposes its column names statically.
                    if not (
                        isinstance(array_expr, exp.Struct)
                        and array_expr.expressions
                        and all(
                            isinstance(struct_expr, exp.PropertyEQ)
                            for struct_expr in array_expr.expressions
                        )
                    ):
                        continue

                    unnest_columns.update(
                        struct_expr.this.name.lower() for struct_expr in array_expr.expressions
                    )
                    break

                if unnest_columns:
                    break

            unnest_alias = unnest.args.get("alias")
            if not unnest_alias:
                alias_name = find_new_name(taken_source_names, "value")
                taken_source_names.add(alias_name)

                # Produce a `TableAlias` AST similar to what is produced for BigQuery. This
                # will be corrected later, when we generate SQL for the `Unnest` AST node.
                aliased_unnest = exp.alias_(unnest, None, table=[alias_name])
                scope.replace(unnest, aliased_unnest)

                unnest_identifier = aliased_unnest.args["alias"].columns[0]
            else:
                alias_columns = getattr(unnest_alias, "columns", [])
                unnest_identifier = unnest_alias.this or seq_get(alias_columns, 0)

            # If we can't resolve a usable identifier, give up and return the
            # untouched copy (the tree may already be partially mutated).
            if not isinstance(unnest_identifier, exp.Identifier):
                return orig_expression

            unnest_to_identifier[unnest] = unnest_identifier
            column_source.update({c.lower(): unnest_identifier for c in unnest_columns})

        for column in scope.columns:
            # Already-qualified columns are left alone.
            if column.table:
                continue

            table = column_source.get(column.name.lower())
            if (
                unnest_identifier
                and not table
                and len(scope.sources) == 1
                and column.name.lower() != unnest_identifier.name.lower()
            ):
                # Don't qualify a column that lives inside the very unnest that
                # defines its alias (it would reference itself).
                unnest_ancestor = column.find_ancestor(exp.Unnest, exp.Select)
                if isinstance(unnest_ancestor, exp.Unnest):
                    ancestor_identifier = unnest_to_identifier.get(unnest_ancestor)
                    if (
                        ancestor_identifier
                        and ancestor_identifier.name.lower() == unnest_identifier.name.lower()
                    ):
                        continue

                table = unnest_identifier

            column.set("table", table and table.copy())

    return expression
 339
 340
 341def _eliminate_dot_variant_lookup(expression: exp.Expr) -> exp.Expr:
 342    if isinstance(expression, exp.Select):
 343        # This transformation is used to facilitate transpilation of BigQuery `UNNEST` operations
 344        # to Snowflake. It should not affect roundtrip because `Unnest` nodes cannot be produced
 345        # by Snowflake's parser.
 346        #
 347        # Additionally, at the time of writing this, BigQuery is the only dialect that produces a
 348        # `TableAlias` node that only fills `columns` and not `this`, due to `UNNEST_COLUMN_ONLY`.
 349        unnest_aliases = set()
 350        for unnest in find_all_in_scope(expression, exp.Unnest):
 351            unnest_alias = unnest.args.get("alias")
 352            if (
 353                isinstance(unnest_alias, exp.TableAlias)
 354                and not unnest_alias.this
 355                and len(unnest_alias.columns) == 1
 356            ):
 357                unnest_aliases.add(unnest_alias.columns[0].name)
 358
 359        if unnest_aliases:
 360            for c in find_all_in_scope(expression, exp.Column):
 361                if c.table in unnest_aliases:
 362                    bracket_lhs = c.args["table"]
 363                    bracket_rhs = exp.Literal.string(c.name)
 364                    bracket = exp.Bracket(this=bracket_lhs, expressions=[bracket_rhs])
 365
 366                    if c.parent is expression:
 367                        # Retain column projection names by using aliases
 368                        c.replace(exp.alias_(bracket, c.this.copy()))
 369                    else:
 370                        c.replace(bracket)
 371
 372    return expression
 373
 374
class SnowflakeGenerator(generator.Generator):
    """Generator that renders sqlglot expression trees as Snowflake SQL."""

    # Capability/syntax flags describing Snowflake's SQL surface; each overrides
    # the base generator's default.
    SELECT_KINDS: tuple[str, ...] = ()
    PARAMETER_TOKEN = "$"
    MATCHED_BY_SOURCE = False
    SINGLE_STRING_INTERVAL = True
    JOIN_HINTS = False
    TABLE_HINTS = False
    QUERY_HINTS = False
    AGGREGATE_FILTER_SUPPORTED = False
    SUPPORTS_TABLE_COPY = False
    COLLATE_IS_FUNC = True
    LIMIT_ONLY_LITERALS = True
    JSON_KEY_VALUE_PAIR_SEP = ","
    INSERT_OVERWRITE = " OVERWRITE INTO"
    STRUCT_DELIMITER = ("(", ")")
    COPY_PARAMS_ARE_WRAPPED = False
    COPY_PARAMS_EQ_REQUIRED = True
    STAR_EXCEPT = "EXCLUDE"
    SUPPORTS_EXPLODING_PROJECTIONS = False
    ARRAY_CONCAT_IS_VAR_LEN = False
    SUPPORTS_CONVERT_TIMEZONE = True
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
    SUPPORTS_MEDIAN = True
    ARRAY_SIZE_NAME = "ARRAY_SIZE"
    SUPPORTS_DECODE_CASE = True

    AFTER_HAVING_MODIFIER_TRANSFORMS = generator.AFTER_HAVING_MODIFIER_TRANSFORMS

    IS_BOOL_ALLOWED = False
    DIRECTED_JOINS = True
    SUPPORTS_UESCAPE = False
    TRY_SUPPORTED = False

    # Per-expression SQL rendering overrides, layered on top of the base
    # generator's TRANSFORMS.
    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
        exp.ArgMax: rename_func("MAX_BY"),
        exp.ArgMin: rename_func("MIN_BY"),
        exp.Array: transforms.preprocess([transforms.inherit_struct_field_names]),
        exp.ArrayConcat: array_concat_sql("ARRAY_CAT"),
        exp.ArrayAppend: array_append_sql("ARRAY_APPEND"),
        exp.ArrayPrepend: array_append_sql("ARRAY_PREPEND"),
        # The searched value is cast to VARIANT unless the node explicitly opts out.
        exp.ArrayContains: lambda self, e: self.func(
            "ARRAY_CONTAINS",
            e.expression
            if e.args.get("ensure_variant") is False
            else exp.cast(e.expression, exp.DType.VARIANT, copy=False),
            e.this,
        ),
        # Note the argument order swap: Snowflake takes (array, value) reversed here.
        exp.ArrayPosition: lambda self, e: self.func(
            "ARRAY_POSITION",
            e.expression,
            e.this,
        ),
        exp.ArrayIntersect: rename_func("ARRAY_INTERSECTION"),
        exp.ArrayOverlaps: rename_func("ARRAYS_OVERLAP"),
        exp.AtTimeZone: lambda self, e: self.func("CONVERT_TIMEZONE", e.args.get("zone"), e.this),
        exp.BitwiseOr: rename_func("BITOR"),
        exp.BitwiseXor: rename_func("BITXOR"),
        exp.BitwiseAnd: rename_func("BITAND"),
        exp.BitwiseAndAgg: rename_func("BITANDAGG"),
        exp.BitwiseOrAgg: rename_func("BITORAGG"),
        exp.BitwiseXorAgg: rename_func("BITXORAGG"),
        exp.BitwiseNot: rename_func("BITNOT"),
        exp.BitwiseLeftShift: rename_func("BITSHIFTLEFT"),
        exp.BitwiseRightShift: rename_func("BITSHIFTRIGHT"),
        exp.Create: transforms.preprocess([_flatten_structured_types_unless_iceberg]),
        exp.CurrentTimestamp: lambda self, e: (
            self.func("SYSDATE") if e.args.get("sysdate") else self.function_fallback_sql(e)
        ),
        exp.CurrentSchemas: lambda self, e: self.func("CURRENT_SCHEMAS"),
        exp.Localtime: lambda self, e: (
            self.func("CURRENT_TIME", e.this) if e.this else "CURRENT_TIME"
        ),
        exp.Localtimestamp: lambda self, e: (
            self.func("CURRENT_TIMESTAMP", e.this) if e.this else "CURRENT_TIMESTAMP"
        ),
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.DateDiff: date_delta_sql("DATEDIFF"),
        exp.DatetimeAdd: date_delta_sql("TIMESTAMPADD"),
        exp.DatetimeDiff: timestampdiff_sql,
        exp.DateStrToDate: datestrtodate_sql,
        exp.Decrypt: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}DECRYPT",
            e.this,
            e.args.get("passphrase"),
            e.args.get("aad"),
            e.args.get("encryption_method"),
        ),
        exp.DecryptRaw: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}DECRYPT_RAW",
            e.this,
            e.args.get("key"),
            e.args.get("iv"),
            e.args.get("aad"),
            e.args.get("encryption_method"),
            e.args.get("aead"),
        ),
        exp.DayOfMonth: rename_func("DAYOFMONTH"),
        exp.DayOfWeek: rename_func("DAYOFWEEK"),
        exp.DayOfWeekIso: rename_func("DAYOFWEEKISO"),
        exp.DayOfYear: rename_func("DAYOFYEAR"),
        exp.DotProduct: rename_func("VECTOR_INNER_PRODUCT"),
        exp.Explode: rename_func("FLATTEN"),
        exp.Extract: lambda self, e: self.func(
            "DATE_PART", map_date_part(e.this, self.dialect), e.expression
        ),
        exp.CosineDistance: rename_func("VECTOR_COSINE_SIMILARITY"),
        exp.EuclideanDistance: rename_func("VECTOR_L2_DISTANCE"),
        exp.HandlerProperty: lambda self, e: f"HANDLER = {self.sql(e, 'this')}",
        exp.FileFormatProperty: lambda self, e: (
            f"FILE_FORMAT=({self.expressions(e, 'expressions', sep=' ')})"
        ),
        exp.FromTimeZone: lambda self, e: self.func(
            "CONVERT_TIMEZONE", e.args.get("zone"), "'UTC'", e.this
        ),
        # ARRAY_GENERATE_RANGE's upper bound is exclusive, hence the `end + 1`.
        exp.GenerateSeries: lambda self, e: self.func(
            "ARRAY_GENERATE_RANGE",
            e.args["start"],
            e.args["end"] if e.args.get("is_end_exclusive") else e.args["end"] + 1,
            e.args.get("step"),
        ),
        exp.GetExtract: rename_func("GET"),
        exp.GroupConcat: lambda self, e: groupconcat_sql(self, e, sep=""),
        exp.If: if_sql(name="IFF", false_value="NULL"),
        exp.JSONArray: lambda self, e: self.func(
            "TO_VARIANT", self.func("ARRAY_CONSTRUCT", *e.expressions)
        ),
        exp.JSONExtractArray: _json_extract_value_array_sql,
        exp.JSONExtractScalar: lambda self, e: self.func(
            "JSON_EXTRACT_PATH_TEXT", e.this, e.expression
        ),
        exp.JSONKeys: rename_func("OBJECT_KEYS"),
        exp.JSONObject: lambda self, e: self.func("OBJECT_CONSTRUCT_KEEP_NULL", *e.expressions),
        exp.JSONPathRoot: lambda *_: "",
        exp.JSONValueArray: _json_extract_value_array_sql,
        exp.Levenshtein: unsupported_args("ins_cost", "del_cost", "sub_cost")(
            rename_func("EDITDISTANCE")
        ),
        exp.LocationProperty: lambda self, e: f"LOCATION={self.sql(e, 'this')}",
        exp.LogicalAnd: rename_func("BOOLAND_AGG"),
        exp.LogicalOr: rename_func("BOOLOR_AGG"),
        exp.Map: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
        exp.ManhattanDistance: rename_func("VECTOR_L1_DISTANCE"),
        exp.MakeInterval: no_make_interval_sql,
        exp.Max: max_or_greatest,
        exp.Min: min_or_least,
        exp.ParseJSON: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}PARSE_JSON", e.this
        ),
        exp.ToBinary: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_BINARY", e.this, e.args.get("format")
        ),
        exp.ToBoolean: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_BOOLEAN", e.this
        ),
        exp.ToDouble: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_DOUBLE", e.this, e.args.get("format")
        ),
        exp.ToFile: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_FILE", e.this, e.args.get("path")
        ),
        exp.JSONFormat: rename_func("TO_JSON"),
        exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
        exp.PercentileCont: transforms.preprocess([transforms.add_within_group_for_percentiles]),
        exp.PercentileDisc: transforms.preprocess([transforms.add_within_group_for_percentiles]),
        exp.Pivot: transforms.preprocess([_unqualify_pivot_columns]),
        exp.RegexpExtract: _regexpextract_sql,
        exp.RegexpExtractAll: _regexpextract_sql,
        exp.RegexpILike: _regexpilike_sql,
        exp.RowAccessProperty: lambda self, e: self.rowaccessproperty_sql(e),
        # SELECT preprocessing pipeline; order matters (e.g. unnest qualification
        # runs after GENERATE_DATE_ARRAY expansion).
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_window_clause,
                transforms.eliminate_distinct_on,
                transforms.explode_projection_to_unnest(),
                transforms.eliminate_semi_and_anti_joins,
                _transform_generate_date_array,
                _qualify_unnested_columns,
                _eliminate_dot_variant_lookup,
            ]
        ),
        exp.SHA: rename_func("SHA1"),
        exp.SHA1Digest: rename_func("SHA1_BINARY"),
        exp.MD5Digest: rename_func("MD5_BINARY"),
        exp.MD5NumberLower64: rename_func("MD5_NUMBER_LOWER64"),
        exp.MD5NumberUpper64: rename_func("MD5_NUMBER_UPPER64"),
        exp.LowerHex: rename_func("TO_CHAR"),
        exp.Skewness: rename_func("SKEW"),
        exp.StarMap: rename_func("OBJECT_CONSTRUCT"),
        exp.StartsWith: rename_func("STARTSWITH"),
        exp.EndsWith: rename_func("ENDSWITH"),
        exp.Rand: lambda self, e: self.func("RANDOM", e.this),
        exp.StrPosition: lambda self, e: strposition_sql(
            self, e, func_name="CHARINDEX", supports_position=True
        ),
        exp.StrToDate: lambda self, e: self.func("DATE", e.this, self.format_time(e)),
        exp.StringToArray: rename_func("STRTOK_TO_ARRAY"),
        exp.Stuff: rename_func("INSERT"),
        exp.StPoint: rename_func("ST_MAKEPOINT"),
        exp.TimeAdd: date_delta_sql("TIMEADD"),
        exp.TimeSlice: lambda self, e: self.func(
            "TIME_SLICE",
            e.this,
            e.expression,
            unit_to_str(e),
            e.args.get("kind"),
        ),
        exp.Timestamp: no_timestamp_sql,
        exp.TimestampAdd: date_delta_sql("TIMESTAMPADD"),
        exp.TimestampDiff: lambda self, e: self.func("TIMESTAMPDIFF", e.unit, e.expression, e.this),
        exp.TimestampTrunc: timestamptrunc_sql(),
        exp.TimeStrToTime: timestrtotime_sql,
        exp.TimeToUnix: lambda self, e: f"EXTRACT(epoch_second FROM {self.sql(e, 'this')})",
        exp.ToArray: rename_func("TO_ARRAY"),
        exp.ToChar: lambda self, e: self.function_fallback_sql(e),
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        exp.TsOrDsToDate: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_DATE", e.this, self.format_time(e)
        ),
        exp.TsOrDsToTime: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_TIME", e.this, self.format_time(e)
        ),
        exp.Unhex: rename_func("HEX_DECODE_BINARY"),
        exp.UnixToTime: lambda self, e: self.func("TO_TIMESTAMP", e.this, e.args.get("scale")),
        exp.Uuid: rename_func("UUID_STRING"),
        exp.VarMap: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
        exp.Booland: rename_func("BOOLAND"),
        exp.Boolor: rename_func("BOOLOR"),
        exp.WeekOfYear: rename_func("WEEKISO"),
        exp.YearOfWeek: rename_func("YEAROFWEEK"),
        exp.YearOfWeekIso: rename_func("YEAROFWEEKISO"),
        exp.Xor: rename_func("BOOLXOR"),
        exp.ByteLength: rename_func("OCTET_LENGTH"),
        exp.Flatten: rename_func("ARRAY_FLATTEN"),
        exp.ArrayConcatAgg: lambda self, e: self.func("ARRAY_FLATTEN", exp.ArrayAgg(this=e.this)),
        exp.SHA2Digest: lambda self, e: self.func(
            "SHA2_BINARY", e.this, e.args.get("length") or exp.Literal.number(256)
        ),
    }
 616
 617    def sortarray_sql(self, expression: exp.SortArray) -> str:
 618        asc = expression.args.get("asc")
 619        nulls_first = expression.args.get("nulls_first")
 620        if asc == exp.false() and nulls_first == exp.true():
 621            nulls_first = None
 622        return self.func("ARRAY_SORT", expression.this, asc, nulls_first)
 623
 624    def nthvalue_sql(self, expression: exp.NthValue) -> str:
 625        result = self.func("NTH_VALUE", expression.this, expression.args.get("offset"))
 626
 627        from_first = expression.args.get("from_first")
 628
 629        if from_first is not None:
 630            if from_first:
 631                result = result + " FROM FIRST"
 632            else:
 633                result = result + " FROM LAST"
 634
 635        return result
 636
    # JSON path components Snowflake can render natively.
    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    # Type-name overrides layered on the base generator's mapping.
    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DType.BIGDECIMAL: "DOUBLE",
        exp.DType.JSON: "VARIANT",
        exp.DType.NESTED: "OBJECT",
        exp.DType.STRUCT: "OBJECT",
        exp.DType.TEXT: "VARCHAR",
    }

    TOKEN_MAPPING = {
        TokenType.AUTO_INCREMENT: "AUTOINCREMENT",
    }

    # Where DDL properties are emitted relative to the statement.
    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.CredentialsProperty: exp.Properties.Location.POST_WITH,
        exp.LocationProperty: exp.Properties.Location.POST_WITH,
        exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
        exp.RowAccessProperty: exp.Properties.Location.POST_SCHEMA,
        exp.SetProperty: exp.Properties.Location.UNSUPPORTED,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    # Expression types that force VALUES to be rendered as UNION ALL selects
    # rather than a VALUES table (see values_sql).
    UNSUPPORTED_VALUES_EXPRESSIONS = {
        exp.Map,
        exp.StarMap,
        exp.Struct,
        exp.VarMap,
    }

    RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS = (exp.ArrayAgg,)
 674
 675    def with_properties(self, properties: exp.Properties) -> str:
 676        return self.properties(properties, wrapped=False, prefix=self.sep(""), sep=" ")
 677
 678    def values_sql(self, expression: exp.Values, values_as_table: bool = True) -> str:
 679        if expression.find(*self.UNSUPPORTED_VALUES_EXPRESSIONS):
 680            values_as_table = False
 681
 682        return super().values_sql(expression, values_as_table=values_as_table)
 683
    def datatype_sql(self, expression: exp.DataType) -> str:
        """Render a data type, special-casing VECTOR element types and structs.

        - A DOUBLE directly nested inside a VECTOR is emitted as FLOAT, since
          VECTOR only accepts FLOAT (not the synonym DOUBLE), INT and STRING:
          https://docs.snowflake.com/en/sql-reference/data-types-vector
        - Struct types with string keys have their keys rewritten into quoted
          identifiers because Snowflake object keys are case-sensitive.
        """
        if expression.is_type(exp.DType.DOUBLE):
            parent = expression.parent
            if isinstance(parent, exp.DataType) and parent.is_type(exp.DType.VECTOR):
                # Preserve FLOAT for VECTOR types instead of mapping to synonym DOUBLE
                return "FLOAT"

        expressions = expression.expressions
        if expressions and expression.is_type(*exp.DataType.STRUCT_TYPES):
            for field_type in expressions:
                # The correct syntax is OBJECT [ (<key> <value_type [NOT NULL] [, ...]) ]
                # A bare DataType field means there is no key to render, so degrade to plain OBJECT
                if isinstance(field_type, exp.DataType):
                    return "OBJECT"
                if (
                    isinstance(field_type, exp.ColumnDef)
                    and field_type.this
                    and field_type.this.is_string
                ):
                    # Doing OBJECT('foo' VARCHAR) is invalid snowflake Syntax. Moreover, besides
                    # converting 'foo' into an identifier, we also need to quote it because these
                    # keys are case-sensitive. For example:
                    #
                    # WITH t AS (SELECT OBJECT_CONSTRUCT('x', 'y') AS c) SELECT c:x FROM t -- correct
                    # WITH t AS (SELECT OBJECT_CONSTRUCT('x', 'y') AS c) SELECT c:X FROM t -- incorrect, returns NULL
                    field_type.this.replace(exp.to_identifier(field_type.name, quoted=True))

        return super().datatype_sql(expression)
 714
    def tonumber_sql(self, expression: exp.ToNumber) -> str:
        """Render TO_NUMBER / TRY_TO_NUMBER, omitting default precision/scale.

        Precision 38 and scale 0 are treated as defaults and dropped when they
        are trailing arguments, keeping the generated SQL minimal.
        """
        precision = expression.args.get("precision")
        scale = expression.args.get("scale")

        default_precision = isinstance(precision, exp.Literal) and precision.name == "38"
        default_scale = isinstance(scale, exp.Literal) and scale.name == "0"

        if default_precision and default_scale:
            # Both trailing arguments are defaults, so both can be omitted
            precision = None
            scale = None
        elif default_scale:
            # Only the last positional argument (scale) can be dropped on its own;
            # a default precision must stay if a non-default scale were to follow it
            scale = None

        func_name = "TRY_TO_NUMBER" if expression.args.get("safe") else "TO_NUMBER"

        return self.func(
            func_name,
            expression.this,
            expression.args.get("format"),
            precision,
            scale,
        )
 737
 738    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
 739        milli = expression.args.get("milli")
 740        if milli is not None:
 741            milli_to_nano = milli.pop() * exp.Literal.number(1000000)
 742            expression.set("nano", milli_to_nano)
 743
 744        return rename_func("TIMESTAMP_FROM_PARTS")(self, expression)
 745
 746    def cast_sql(self, expression: exp.Cast, safe_prefix: str | None = None) -> str:
 747        if expression.is_type(exp.DType.GEOGRAPHY):
 748            return self.func("TO_GEOGRAPHY", expression.this)
 749        if expression.is_type(exp.DType.GEOMETRY):
 750            return self.func("TO_GEOMETRY", expression.this)
 751
 752        return super().cast_sql(expression, safe_prefix=safe_prefix)
 753
    def trycast_sql(self, expression: exp.TryCast) -> str:
        """Render TRY_CAST, degrading to plain CAST for non-string values."""
        value = expression.this

        if value.type is None:
            # Lazily annotate so we can inspect the value's resolved type below
            from sqlglot.optimizer.annotate_types import annotate_types

            value = annotate_types(value, dialect=self.dialect)

        # Snowflake requires that TRY_CAST's value be a string
        # If TRY_CAST is being roundtripped (since Snowflake is the only dialect that sets "requires_string") or
        # if we can deduce that the value is a string, then we can generate TRY_CAST
        if expression.args.get("requires_string") or value.is_type(*exp.DataType.TEXT_TYPES):
            return super().trycast_sql(expression)

        return self.cast_sql(expression)
 769
 770    def log_sql(self, expression: exp.Log) -> str:
 771        if not expression.expression:
 772            return self.func("LN", expression.this)
 773
 774        return super().log_sql(expression)
 775
 776    def greatest_sql(self, expression: exp.Greatest) -> str:
 777        name = "GREATEST_IGNORE_NULLS" if expression.args.get("ignore_nulls") else "GREATEST"
 778        return self.func(name, expression.this, *expression.expressions)
 779
 780    def least_sql(self, expression: exp.Least) -> str:
 781        name = "LEAST_IGNORE_NULLS" if expression.args.get("ignore_nulls") else "LEAST"
 782        return self.func(name, expression.this, *expression.expressions)
 783
 784    def generator_sql(self, expression: exp.Generator) -> str:
 785        args = []
 786        rowcount = expression.args.get("rowcount")
 787        timelimit = expression.args.get("timelimit")
 788
 789        if rowcount:
 790            args.append(exp.Kwarg(this=exp.var("ROWCOUNT"), expression=rowcount))
 791        if timelimit:
 792            args.append(exp.Kwarg(this=exp.var("TIMELIMIT"), expression=timelimit))
 793
 794        return self.func("GENERATOR", *args)
 795
    def unnest_sql(self, expression: exp.Unnest) -> str:
        """Render UNNEST via Snowflake's (TABLE) FLATTEN construct.

        FLATTEN exposes the fixed column set (SEQ, KEY, PATH, INDEX, VALUE,
        THIS): the unnest alias's first column is mapped onto VALUE and an
        explicit offset expression, if present, replaces the INDEX name.
        """
        unnest_alias = expression.args.get("alias")
        offset = expression.args.get("offset")

        unnest_alias_columns = unnest_alias.columns if unnest_alias else []
        value = seq_get(unnest_alias_columns, 0) or exp.to_identifier("value")

        # Column order must match FLATTEN's output columns exactly
        columns = [
            exp.to_identifier("seq"),
            exp.to_identifier("key"),
            exp.to_identifier("path"),
            offset.pop() if isinstance(offset, exp.Expr) else exp.to_identifier("index"),
            value,
            exp.to_identifier("this"),
        ]

        if unnest_alias:
            unnest_alias.set("columns", columns)
        else:
            unnest_alias = exp.TableAlias(this="_u", columns=columns)

        # FLATTEN takes its argument as a named parameter: INPUT => <expr>
        table_input = self.sql(expression.expressions[0])
        if not table_input.startswith("INPUT =>"):
            table_input = f"INPUT => {table_input}"

        expression_parent = expression.parent

        # Inside LATERAL the TABLE() wrapper must be omitted
        explode = (
            f"FLATTEN({table_input})"
            if isinstance(expression_parent, exp.Lateral)
            else f"TABLE(FLATTEN({table_input}))"
        )
        alias = self.sql(unnest_alias)
        alias = f" AS {alias}" if alias else ""
        # When the unnest is not already a table source, render "<value> FROM <flatten>"
        value = (
            ""
            if isinstance(expression_parent, (exp.From, exp.Join, exp.Lateral))
            else f"{value} FROM "
        )

        return f"{value}{explode}{alias}"
 837
    def show_sql(self, expression: exp.Show) -> str:
        """Render a Snowflake SHOW command with its optional modifiers
        (TERSE, ICEBERG, HISTORY, LIKE, IN <scope>, STARTS WITH, LIMIT,
        FROM, WITH PRIVILEGES), in Snowflake's clause order."""
        terse = "TERSE " if expression.args.get("terse") else ""
        iceberg = "ICEBERG " if expression.args.get("iceberg") else ""
        history = " HISTORY" if expression.args.get("history") else ""
        like = self.sql(expression, "like")
        like = f" LIKE {like}" if like else ""

        scope = self.sql(expression, "scope")
        scope = f" {scope}" if scope else ""

        scope_kind = self.sql(expression, "scope_kind")
        if scope_kind:
            scope_kind = f" IN {scope_kind}"

        starts_with = self.sql(expression, "starts_with")
        if starts_with:
            starts_with = f" STARTS WITH {starts_with}"

        limit = self.sql(expression, "limit")

        from_ = self.sql(expression, "from_")
        if from_:
            from_ = f" FROM {from_}"

        privileges = self.expressions(expression, key="privileges", flat=True)
        privileges = f" WITH PRIVILEGES {privileges}" if privileges else ""

        return f"SHOW {terse}{iceberg}{expression.name}{history}{like}{scope_kind}{scope}{starts_with}{limit}{from_}{privileges}"
 866
 867    def rowaccessproperty_sql(self, expression: exp.RowAccessProperty) -> str:
 868        if not expression.this:
 869            return "ROW ACCESS"
 870        on = f" ON ({self.expressions(expression, flat=True)})" if expression.expressions else ""
 871        return f"WITH ROW ACCESS POLICY {self.sql(expression, 'this')}{on}"
 872
 873    def describe_sql(self, expression: exp.Describe) -> str:
 874        kind_value = expression.args.get("kind") or "TABLE"
 875
 876        properties = expression.args.get("properties")
 877        if properties:
 878            qualifier = self.expressions(properties, sep=" ")
 879            kind = f" {qualifier} {kind_value}"
 880        else:
 881            kind = f" {kind_value}"
 882
 883        this = f" {self.sql(expression, 'this')}"
 884        expressions = self.expressions(expression, flat=True)
 885        expressions = f" {expressions}" if expressions else ""
 886        return f"DESCRIBE{kind}{this}{expressions}"
 887
 888    def generatedasidentitycolumnconstraint_sql(
 889        self, expression: exp.GeneratedAsIdentityColumnConstraint
 890    ) -> str:
 891        start = expression.args.get("start")
 892        start = f" START {start}" if start else ""
 893        increment = expression.args.get("increment")
 894        increment = f" INCREMENT {increment}" if increment else ""
 895
 896        order = expression.args.get("order")
 897        if order is not None:
 898            order_clause = " ORDER" if order else " NOORDER"
 899        else:
 900            order_clause = ""
 901
 902        return f"AUTOINCREMENT{start}{increment}{order_clause}"
 903
 904    def cluster_sql(self, expression: exp.Cluster) -> str:
 905        return f"CLUSTER BY ({self.expressions(expression, flat=True)})"
 906
    def struct_sql(self, expression: exp.Struct) -> str:
        """Render a struct literal as OBJECT_CONSTRUCT, or as the wildcard
        {...} object syntax when the single argument is a star."""
        if len(expression.expressions) == 1:
            arg = expression.expressions[0]
            if arg.is_star or (isinstance(arg, exp.ILike) and arg.left.is_star):
                # Wildcard syntax: https://docs.snowflake.com/en/sql-reference/data-types-semistructured#object
                return f"{{{self.sql(expression.expressions[0])}}}"

        keys = []
        values = []

        for i, e in enumerate(expression.expressions):
            if isinstance(e, exp.PropertyEQ):
                # Identifier keys are converted into string literals
                keys.append(
                    exp.Literal.string(e.name) if isinstance(e.this, exp.Identifier) else e.this
                )
                values.append(e.expression)
            else:
                # Unnamed fields get positional keys: '_0', '_1', ...
                keys.append(exp.Literal.string(f"_{i}"))
                values.append(e)

        # OBJECT_CONSTRUCT takes alternating key/value arguments
        return self.func("OBJECT_CONSTRUCT", *flatten(zip(keys, values)))
 928
 929    @unsupported_args("weight", "accuracy")
 930    def approxquantile_sql(self, expression: exp.ApproxQuantile) -> str:
 931        return self.func("APPROX_PERCENTILE", expression.this, expression.args.get("quantile"))
 932
    def alterset_sql(self, expression: exp.AlterSet) -> str:
        """Render ALTER ... SET, including stage file format, stage copy
        options and tags, each emitted only when present."""
        exprs = self.expressions(expression, flat=True)
        exprs = f" {exprs}" if exprs else ""
        file_format = self.expressions(expression, key="file_format", flat=True, sep=" ")
        file_format = f" STAGE_FILE_FORMAT = ({file_format})" if file_format else ""
        copy_options = self.expressions(expression, key="copy_options", flat=True, sep=" ")
        copy_options = f" STAGE_COPY_OPTIONS = ({copy_options})" if copy_options else ""
        tag = self.expressions(expression, key="tag", flat=True)
        tag = f" TAG {tag}" if tag else ""

        return f"SET{exprs}{file_format}{copy_options}{tag}"
 944
    def strtotime_sql(self, expression: exp.StrToTime) -> str:
        """Render a string-to-timestamp conversion, picking the TO_* function
        from the target type and prefixing TRY_ for safe conversions."""
        # target_type is stored as a DataType instance
        target_type = expression.args.get("target_type")

        # Get the type enum from DataType instance or from type annotation
        if isinstance(target_type, exp.DataType):
            type_enum = target_type.this
        elif expression.type:
            type_enum = expression.type.this
        else:
            type_enum = exp.DType.TIMESTAMP

        func_name = TIMESTAMP_TYPES.get(type_enum, "TO_TIMESTAMP")

        return self.func(
            f"{'TRY_' if expression.args.get('safe') else ''}{func_name}",
            expression.this,
            self.format_time(expression),
        )
 964
    def timestampsub_sql(self, expression: exp.TimestampSub) -> str:
        """Render timestamp subtraction as TIMESTAMPADD with a negated amount."""
        return self.sql(
            exp.TimestampAdd(
                this=expression.this,
                expression=expression.expression * -1,
                unit=expression.unit,
            )
        )
 973
    def jsonextract_sql(self, expression: exp.JSONExtract) -> str:
        """Render JSON extraction as GET_PATH, wrapping raw JSON strings in
        PARSE_JSON first so GET_PATH receives a VARIANT."""
        this = expression.this

        # JSON strings are valid coming from other dialects such as BQ so
        # for these cases we PARSE_JSON preemptively
        if not isinstance(this, (exp.ParseJSON, exp.JSONExtract)) and not expression.args.get(
            "requires_json"
        ):
            this = exp.ParseJSON(this=this)

        return self.func(
            "GET_PATH",
            this,
            expression.expression,
        )
 989
 990    def timetostr_sql(self, expression: exp.TimeToStr) -> str:
 991        this = expression.this
 992        if this.is_string:
 993            this = exp.cast(this, exp.DType.TIMESTAMP)
 994
 995        return self.func("TO_CHAR", this, self.format_time(expression))
 996
    def datesub_sql(self, expression: exp.DateSub) -> str:
        """Render date subtraction as DATEADD with a negated count."""
        value = expression.expression
        if value:
            # DATEADD(unit, -n, date) is Snowflake's spelling of DATE_SUB
            value.replace(value * (-1))
        else:
            self.unsupported("DateSub cannot be transpiled if the subtracted count is unknown")

        return date_delta_sql("DATEADD")(self, expression)
1005
1006    def select_sql(self, expression: exp.Select) -> str:
1007        limit = expression.args.get("limit")
1008        offset = expression.args.get("offset")
1009        if offset and not limit:
1010            expression.limit(exp.Null(), copy=False)
1011        return super().select_sql(expression)
1012
    def createable_sql(self, expression: exp.Create, locations: defaultdict) -> str:
        """Render the createable portion of CREATE, moving COPY GRANTS before
        the column list for materialized views."""
        is_materialized = expression.find(exp.MaterializedProperty)
        copy_grants_property = expression.find(exp.CopyGrantsProperty)

        if expression.kind == "VIEW" and is_materialized and copy_grants_property:
            # For materialized views, COPY GRANTS is located *before* the columns list
            # This is in contrast to normal views where COPY GRANTS is located *after* the columns list
            # We default CopyGrantsProperty to POST_SCHEMA which means we need to output it POST_NAME if a materialized view is detected
            # ref: https://docs.snowflake.com/en/sql-reference/sql/create-materialized-view#syntax
            # ref: https://docs.snowflake.com/en/sql-reference/sql/create-view#syntax
            post_schema_properties = locations[exp.Properties.Location.POST_SCHEMA]
            # Remove the property from its default location so it isn't rendered twice
            post_schema_properties.pop(post_schema_properties.index(copy_grants_property))

            this_name = self.sql(expression.this, "this")
            copy_grants = self.sql(copy_grants_property)
            this_schema = self.schema_columns_sql(expression.this)
            this_schema = f"{self.sep()}{this_schema}" if this_schema else ""

            return f"{this_name}{self.sep()}{copy_grants}{this_schema}"

        return super().createable_sql(expression, locations)
1034
    def arrayagg_sql(self, expression: exp.ArrayAgg) -> str:
        """Render ARRAY_AGG, relocating any ORDER BY into a WITHIN GROUP clause."""
        this = expression.this

        # If an ORDER BY clause is present, we need to remove it from ARRAY_AGG
        # and add it later as part of the WITHIN GROUP clause
        order = this if isinstance(this, exp.Order) else None
        if order:
            # Detach the ordered operand so ARRAY_AGG renders without the ORDER BY
            expression.set("this", order.this.pop())

        expr_sql = super().arrayagg_sql(expression)

        if order:
            expr_sql = self.sql(exp.WithinGroup(this=expr_sql, expression=order))

        return expr_sql
1050
1051    def arraytostring_sql(self, expression: exp.ArrayToString) -> str:
1052        return self.func("ARRAY_TO_STRING", expression.this, expression.expression)
1053
    def array_sql(self, expression: exp.Array) -> str:
        """Render ARRAY literals, converting a SELECT AS STRUCT subquery into
        a subquery over ARRAY_AGG(OBJECT_CONSTRUCT(...))."""
        expressions = expression.expressions

        first_expr = seq_get(expressions, 0)
        if isinstance(first_expr, exp.Select):
            # SELECT AS STRUCT foo AS alias_foo -> ARRAY_AGG(OBJECT_CONSTRUCT('alias_foo', foo))
            if first_expr.text("kind").upper() == "STRUCT":
                object_construct_args = []
                for expr in first_expr.expressions:
                    # Alias case: SELECT AS STRUCT foo AS alias_foo -> OBJECT_CONSTRUCT('alias_foo', foo)
                    # Column case: SELECT AS STRUCT foo -> OBJECT_CONSTRUCT('foo', foo)
                    name = expr.this if isinstance(expr, exp.Alias) else expr

                    object_construct_args.extend([exp.Literal.string(expr.alias_or_name), name])

                array_agg = exp.ArrayAgg(this=build_object_construct(args=object_construct_args))

                # Rewrite the SELECT in place: drop STRUCT kind, project only the agg
                first_expr.set("kind", None)
                first_expr.set("expressions", [array_agg])

                return self.sql(first_expr.subquery())

        return inline_array_sql(self, expression)
1077
1078    def currentdate_sql(self, expression: exp.CurrentDate) -> str:
1079        zone = self.sql(expression, "this")
1080        if not zone:
1081            return super().currentdate_sql(expression)
1082
1083        expr = exp.Cast(
1084            this=exp.ConvertTimezone(target_tz=zone, timestamp=exp.CurrentTimestamp()),
1085            to=exp.DataType(this=exp.DType.DATE),
1086        )
1087        return self.sql(expr)
1088
    def dot_sql(self, expression: exp.Dot) -> str:
        """Render dot access, using colon notation for a top-level STRUCT."""
        this = expression.this

        if not this.type:
            # Lazily annotate so we can check whether the operand is a STRUCT
            from sqlglot.optimizer.annotate_types import annotate_types

            this = annotate_types(this, dialect=self.dialect)

        if not isinstance(this, exp.Dot) and this.is_type(exp.DType.STRUCT):
            # Generate colon notation for the top level STRUCT
            return f"{self.sql(this)}:{self.sql(expression, 'expression')}"

        return super().dot_sql(expression)
1102
1103    def modelattribute_sql(self, expression: exp.ModelAttribute) -> str:
1104        return f"{self.sql(expression, 'this')}!{self.sql(expression, 'expression')}"
1105
1106    def format_sql(self, expression: exp.Format) -> str:
1107        if expression.name.lower() == "%s" and len(expression.expressions) == 1:
1108            return self.func("TO_CHAR", expression.expressions[0])
1109
1110        return self.function_fallback_sql(expression)
1111
    def splitpart_sql(self, expression: exp.SplitPart) -> str:
        """Render SPLIT_PART, defaulting delimiter to ' ' and part_index to 1."""
        # Default the delimiter to a single space if missing
        if not expression.args.get("delimiter"):
            expression.set("delimiter", exp.Literal.string(" "))

        # Default part_index to 1 if missing
        if not expression.args.get("part_index"):
            expression.set("part_index", exp.Literal.number(1))

        return rename_func("SPLIT_PART")(self, expression)
1121
    def uniform_sql(self, expression: exp.Uniform) -> str:
        """Render UNIFORM(min, max, gen), synthesizing the generator argument
        when transpiling Databricks' 2- or 3-argument forms."""
        gen = expression.args.get("gen")
        seed = expression.args.get("seed")

        # From Databricks UNIFORM(min, max, seed) -> Wrap gen in RANDOM(seed)
        if seed:
            gen = exp.Rand(this=seed)

        # No gen argument (from Databricks 2-arg UNIFORM(min, max)) -> Add RANDOM()
        if not gen:
            gen = exp.Rand()

        return self.func("UNIFORM", expression.this, expression.expression, gen)
1135
    def window_sql(self, expression: exp.Window) -> str:
        """Render a window expression, dropping the default frame
        (ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) from
        ranking window functions, where it is redundant."""
        spec = expression.args.get("spec")
        this = expression.this

        if (
            (
                isinstance(this, RANKING_WINDOW_FUNCTIONS_WITH_FRAME)
                # The ranking function may be wrapped in RESPECT/IGNORE NULLS
                or (
                    isinstance(this, (exp.RespectNulls, exp.IgnoreNulls))
                    and isinstance(this.this, RANKING_WINDOW_FUNCTIONS_WITH_FRAME)
                )
            )
            and spec
            and (
                spec.text("kind").upper() == "ROWS"
                and spec.text("start").upper() == "UNBOUNDED"
                and spec.text("start_side").upper() == "PRECEDING"
                and spec.text("end").upper() == "UNBOUNDED"
                and spec.text("end_side").upper() == "FOLLOWING"
            )
        ):
            # omit the default window from window ranking functions
            expression.set("spec", None)
        return super().window_sql(expression)
class SnowflakeGenerator(sqlglot.generator.Generator):
 376class SnowflakeGenerator(generator.Generator):
    # Dialect capability flags and rendering settings consumed by the base
    # generator; each one overrides the default declared on generator.Generator.
    SELECT_KINDS: tuple[str, ...] = ()
    PARAMETER_TOKEN = "$"
    MATCHED_BY_SOURCE = False
    SINGLE_STRING_INTERVAL = True
    JOIN_HINTS = False
    TABLE_HINTS = False
    QUERY_HINTS = False
    AGGREGATE_FILTER_SUPPORTED = False
    SUPPORTS_TABLE_COPY = False
    COLLATE_IS_FUNC = True
    LIMIT_ONLY_LITERALS = True
    JSON_KEY_VALUE_PAIR_SEP = ","
    INSERT_OVERWRITE = " OVERWRITE INTO"
    STRUCT_DELIMITER = ("(", ")")
    COPY_PARAMS_ARE_WRAPPED = False
    COPY_PARAMS_EQ_REQUIRED = True
    STAR_EXCEPT = "EXCLUDE"
    SUPPORTS_EXPLODING_PROJECTIONS = False
    ARRAY_CONCAT_IS_VAR_LEN = False
    SUPPORTS_CONVERT_TIMEZONE = True
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
    SUPPORTS_MEDIAN = True
    ARRAY_SIZE_NAME = "ARRAY_SIZE"
    SUPPORTS_DECODE_CASE = True

    AFTER_HAVING_MODIFIER_TRANSFORMS = generator.AFTER_HAVING_MODIFIER_TRANSFORMS

    IS_BOOL_ALLOWED = False
    DIRECTED_JOINS = True
    SUPPORTS_UESCAPE = False
    TRY_SUPPORTED = False
 408
    # Expression-type -> renderer overrides layered on top of the base
    # generator's TRANSFORMS; each entry maps a sqlglot expression class to the
    # Snowflake function/syntax it should be emitted as.
    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
        exp.ArgMax: rename_func("MAX_BY"),
        exp.ArgMin: rename_func("MIN_BY"),
        exp.Array: transforms.preprocess([transforms.inherit_struct_field_names]),
        exp.ArrayConcat: array_concat_sql("ARRAY_CAT"),
        exp.ArrayAppend: array_append_sql("ARRAY_APPEND"),
        exp.ArrayPrepend: array_append_sql("ARRAY_PREPEND"),
        exp.ArrayContains: lambda self, e: self.func(
            "ARRAY_CONTAINS",
            # The probed value must be a VARIANT unless explicitly opted out
            e.expression
            if e.args.get("ensure_variant") is False
            else exp.cast(e.expression, exp.DType.VARIANT, copy=False),
            e.this,
        ),
        exp.ArrayPosition: lambda self, e: self.func(
            "ARRAY_POSITION",
            e.expression,
            e.this,
        ),
        exp.ArrayIntersect: rename_func("ARRAY_INTERSECTION"),
        exp.ArrayOverlaps: rename_func("ARRAYS_OVERLAP"),
        exp.AtTimeZone: lambda self, e: self.func("CONVERT_TIMEZONE", e.args.get("zone"), e.this),
        exp.BitwiseOr: rename_func("BITOR"),
        exp.BitwiseXor: rename_func("BITXOR"),
        exp.BitwiseAnd: rename_func("BITAND"),
        exp.BitwiseAndAgg: rename_func("BITANDAGG"),
        exp.BitwiseOrAgg: rename_func("BITORAGG"),
        exp.BitwiseXorAgg: rename_func("BITXORAGG"),
        exp.BitwiseNot: rename_func("BITNOT"),
        exp.BitwiseLeftShift: rename_func("BITSHIFTLEFT"),
        exp.BitwiseRightShift: rename_func("BITSHIFTRIGHT"),
        exp.Create: transforms.preprocess([_flatten_structured_types_unless_iceberg]),
        exp.CurrentTimestamp: lambda self, e: (
            self.func("SYSDATE") if e.args.get("sysdate") else self.function_fallback_sql(e)
        ),
        exp.CurrentSchemas: lambda self, e: self.func("CURRENT_SCHEMAS"),
        exp.Localtime: lambda self, e: (
            self.func("CURRENT_TIME", e.this) if e.this else "CURRENT_TIME"
        ),
        exp.Localtimestamp: lambda self, e: (
            self.func("CURRENT_TIMESTAMP", e.this) if e.this else "CURRENT_TIMESTAMP"
        ),
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.DateDiff: date_delta_sql("DATEDIFF"),
        exp.DatetimeAdd: date_delta_sql("TIMESTAMPADD"),
        exp.DatetimeDiff: timestampdiff_sql,
        exp.DateStrToDate: datestrtodate_sql,
        exp.Decrypt: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}DECRYPT",
            e.this,
            e.args.get("passphrase"),
            e.args.get("aad"),
            e.args.get("encryption_method"),
        ),
        exp.DecryptRaw: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}DECRYPT_RAW",
            e.this,
            e.args.get("key"),
            e.args.get("iv"),
            e.args.get("aad"),
            e.args.get("encryption_method"),
            e.args.get("aead"),
        ),
        exp.DayOfMonth: rename_func("DAYOFMONTH"),
        exp.DayOfWeek: rename_func("DAYOFWEEK"),
        exp.DayOfWeekIso: rename_func("DAYOFWEEKISO"),
        exp.DayOfYear: rename_func("DAYOFYEAR"),
        exp.DotProduct: rename_func("VECTOR_INNER_PRODUCT"),
        exp.Explode: rename_func("FLATTEN"),
        exp.Extract: lambda self, e: self.func(
            "DATE_PART", map_date_part(e.this, self.dialect), e.expression
        ),
        exp.CosineDistance: rename_func("VECTOR_COSINE_SIMILARITY"),
        exp.EuclideanDistance: rename_func("VECTOR_L2_DISTANCE"),
        exp.HandlerProperty: lambda self, e: f"HANDLER = {self.sql(e, 'this')}",
        exp.FileFormatProperty: lambda self, e: (
            f"FILE_FORMAT=({self.expressions(e, 'expressions', sep=' ')})"
        ),
        exp.FromTimeZone: lambda self, e: self.func(
            "CONVERT_TIMEZONE", e.args.get("zone"), "'UTC'", e.this
        ),
        exp.GenerateSeries: lambda self, e: self.func(
            "ARRAY_GENERATE_RANGE",
            e.args["start"],
            # ARRAY_GENERATE_RANGE's end bound is exclusive, so widen inclusive ends by 1
            e.args["end"] if e.args.get("is_end_exclusive") else e.args["end"] + 1,
            e.args.get("step"),
        ),
        exp.GetExtract: rename_func("GET"),
        exp.GroupConcat: lambda self, e: groupconcat_sql(self, e, sep=""),
        exp.If: if_sql(name="IFF", false_value="NULL"),
        exp.JSONArray: lambda self, e: self.func(
            "TO_VARIANT", self.func("ARRAY_CONSTRUCT", *e.expressions)
        ),
        exp.JSONExtractArray: _json_extract_value_array_sql,
        exp.JSONExtractScalar: lambda self, e: self.func(
            "JSON_EXTRACT_PATH_TEXT", e.this, e.expression
        ),
        exp.JSONKeys: rename_func("OBJECT_KEYS"),
        exp.JSONObject: lambda self, e: self.func("OBJECT_CONSTRUCT_KEEP_NULL", *e.expressions),
        exp.JSONPathRoot: lambda *_: "",
        exp.JSONValueArray: _json_extract_value_array_sql,
        exp.Levenshtein: unsupported_args("ins_cost", "del_cost", "sub_cost")(
            rename_func("EDITDISTANCE")
        ),
        exp.LocationProperty: lambda self, e: f"LOCATION={self.sql(e, 'this')}",
        exp.LogicalAnd: rename_func("BOOLAND_AGG"),
        exp.LogicalOr: rename_func("BOOLOR_AGG"),
        exp.Map: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
        exp.ManhattanDistance: rename_func("VECTOR_L1_DISTANCE"),
        exp.MakeInterval: no_make_interval_sql,
        exp.Max: max_or_greatest,
        exp.Min: min_or_least,
        exp.ParseJSON: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}PARSE_JSON", e.this
        ),
        exp.ToBinary: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_BINARY", e.this, e.args.get("format")
        ),
        exp.ToBoolean: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_BOOLEAN", e.this
        ),
        exp.ToDouble: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_DOUBLE", e.this, e.args.get("format")
        ),
        exp.ToFile: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_FILE", e.this, e.args.get("path")
        ),
        exp.JSONFormat: rename_func("TO_JSON"),
        exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
        exp.PercentileCont: transforms.preprocess([transforms.add_within_group_for_percentiles]),
        exp.PercentileDisc: transforms.preprocess([transforms.add_within_group_for_percentiles]),
        exp.Pivot: transforms.preprocess([_unqualify_pivot_columns]),
        exp.RegexpExtract: _regexpextract_sql,
        exp.RegexpExtractAll: _regexpextract_sql,
        exp.RegexpILike: _regexpilike_sql,
        exp.RowAccessProperty: lambda self, e: self.rowaccessproperty_sql(e),
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_window_clause,
                transforms.eliminate_distinct_on,
                transforms.explode_projection_to_unnest(),
                transforms.eliminate_semi_and_anti_joins,
                _transform_generate_date_array,
                _qualify_unnested_columns,
                _eliminate_dot_variant_lookup,
            ]
        ),
        exp.SHA: rename_func("SHA1"),
        exp.SHA1Digest: rename_func("SHA1_BINARY"),
        exp.MD5Digest: rename_func("MD5_BINARY"),
        exp.MD5NumberLower64: rename_func("MD5_NUMBER_LOWER64"),
        exp.MD5NumberUpper64: rename_func("MD5_NUMBER_UPPER64"),
        exp.LowerHex: rename_func("TO_CHAR"),
        exp.Skewness: rename_func("SKEW"),
        exp.StarMap: rename_func("OBJECT_CONSTRUCT"),
        exp.StartsWith: rename_func("STARTSWITH"),
        exp.EndsWith: rename_func("ENDSWITH"),
        exp.Rand: lambda self, e: self.func("RANDOM", e.this),
        exp.StrPosition: lambda self, e: strposition_sql(
            self, e, func_name="CHARINDEX", supports_position=True
        ),
        exp.StrToDate: lambda self, e: self.func("DATE", e.this, self.format_time(e)),
        exp.StringToArray: rename_func("STRTOK_TO_ARRAY"),
        exp.Stuff: rename_func("INSERT"),
        exp.StPoint: rename_func("ST_MAKEPOINT"),
        exp.TimeAdd: date_delta_sql("TIMEADD"),
        exp.TimeSlice: lambda self, e: self.func(
            "TIME_SLICE",
            e.this,
            e.expression,
            unit_to_str(e),
            e.args.get("kind"),
        ),
        exp.Timestamp: no_timestamp_sql,
        exp.TimestampAdd: date_delta_sql("TIMESTAMPADD"),
        exp.TimestampDiff: lambda self, e: self.func("TIMESTAMPDIFF", e.unit, e.expression, e.this),
        exp.TimestampTrunc: timestamptrunc_sql(),
        exp.TimeStrToTime: timestrtotime_sql,
        exp.TimeToUnix: lambda self, e: f"EXTRACT(epoch_second FROM {self.sql(e, 'this')})",
        exp.ToArray: rename_func("TO_ARRAY"),
        exp.ToChar: lambda self, e: self.function_fallback_sql(e),
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        exp.TsOrDsToDate: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_DATE", e.this, self.format_time(e)
        ),
        exp.TsOrDsToTime: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_TIME", e.this, self.format_time(e)
        ),
        exp.Unhex: rename_func("HEX_DECODE_BINARY"),
        exp.UnixToTime: lambda self, e: self.func("TO_TIMESTAMP", e.this, e.args.get("scale")),
        exp.Uuid: rename_func("UUID_STRING"),
        exp.VarMap: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
        exp.Booland: rename_func("BOOLAND"),
        exp.Boolor: rename_func("BOOLOR"),
        exp.WeekOfYear: rename_func("WEEKISO"),
        exp.YearOfWeek: rename_func("YEAROFWEEK"),
        exp.YearOfWeekIso: rename_func("YEAROFWEEKISO"),
        exp.Xor: rename_func("BOOLXOR"),
        exp.ByteLength: rename_func("OCTET_LENGTH"),
        exp.Flatten: rename_func("ARRAY_FLATTEN"),
        exp.ArrayConcatAgg: lambda self, e: self.func("ARRAY_FLATTEN", exp.ArrayAgg(this=e.this)),
        exp.SHA2Digest: lambda self, e: self.func(
            "SHA2_BINARY", e.this, e.args.get("length") or exp.Literal.number(256)
        ),
    }
 617
 618    def sortarray_sql(self, expression: exp.SortArray) -> str:
 619        asc = expression.args.get("asc")
 620        nulls_first = expression.args.get("nulls_first")
 621        if asc == exp.false() and nulls_first == exp.true():
 622            nulls_first = None
 623        return self.func("ARRAY_SORT", expression.this, asc, nulls_first)
 624
 625    def nthvalue_sql(self, expression: exp.NthValue) -> str:
 626        result = self.func("NTH_VALUE", expression.this, expression.args.get("offset"))
 627
 628        from_first = expression.args.get("from_first")
 629
 630        if from_first is not None:
 631            if from_first:
 632                result = result + " FROM FIRST"
 633            else:
 634                result = result + " FROM LAST"
 635
 636        return result
 637
    # JSON path components that can be generated natively; other parts are unsupported.
    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    # Snowflake spellings for generic data types (e.g. JSON is stored as VARIANT).
    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DType.BIGDECIMAL: "DOUBLE",
        exp.DType.JSON: "VARIANT",
        exp.DType.NESTED: "OBJECT",
        exp.DType.STRUCT: "OBJECT",
        exp.DType.TEXT: "VARCHAR",
    }

    # Token-level renames applied when generating keywords.
    TOKEN_MAPPING = {
        TokenType.AUTO_INCREMENT: "AUTOINCREMENT",
    }

    # Placement of each property clause within CREATE statements.
    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.CredentialsProperty: exp.Properties.Location.POST_WITH,
        exp.LocationProperty: exp.Properties.Location.POST_WITH,
        exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
        exp.RowAccessProperty: exp.Properties.Location.POST_SCHEMA,
        exp.SetProperty: exp.Properties.Location.UNSUPPORTED,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    # Expressions that force a VALUES clause to be rendered inline instead of
    # as a table (see values_sql below).
    UNSUPPORTED_VALUES_EXPRESSIONS = {
        exp.Map,
        exp.StarMap,
        exp.Struct,
        exp.VarMap,
    }

    # Aggregates for which RESPECT/IGNORE NULLS modifiers are not supported.
    RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS = (exp.ArrayAgg,)
 675
    def with_properties(self, properties: exp.Properties) -> str:
        # Properties are emitted space-separated with no wrapping parentheses.
        return self.properties(properties, wrapped=False, prefix=self.sep(""), sep=" ")
 678
 679    def values_sql(self, expression: exp.Values, values_as_table: bool = True) -> str:
 680        if expression.find(*self.UNSUPPORTED_VALUES_EXPRESSIONS):
 681            values_as_table = False
 682
 683        return super().values_sql(expression, values_as_table=values_as_table)
 684
    def datatype_sql(self, expression: exp.DataType) -> str:
        """Render a data type, special-casing VECTOR element types and quoting
        the string keys of OBJECT (struct) field definitions in place."""
        # Check if this is a FLOAT type nested inside a VECTOR type
        # VECTOR only accepts FLOAT (not DOUBLE), INT, and STRING as element types
        # https://docs.snowflake.com/en/sql-reference/data-types-vector
        if expression.is_type(exp.DType.DOUBLE):
            parent = expression.parent
            if isinstance(parent, exp.DataType) and parent.is_type(exp.DType.VECTOR):
                # Preserve FLOAT for VECTOR types instead of mapping to synonym DOUBLE
                return "FLOAT"

        expressions = expression.expressions
        if expressions and expression.is_type(*exp.DataType.STRUCT_TYPES):
            for field_type in expressions:
                # The correct syntax is OBJECT [ (<key> <value_type [NOT NULL] [, ...]) ]
                if isinstance(field_type, exp.DataType):
                    # A bare type (no key) cannot be expressed, so emit plain OBJECT.
                    return "OBJECT"
                if (
                    isinstance(field_type, exp.ColumnDef)
                    and field_type.this
                    and field_type.this.is_string
                ):
                    # Doing OBJECT('foo' VARCHAR) is invalid snowflake Syntax. Moreover, besides
                    # converting 'foo' into an identifier, we also need to quote it because these
                    # keys are case-sensitive. For example:
                    #
                    # WITH t AS (SELECT OBJECT_CONSTRUCT('x', 'y') AS c) SELECT c:x FROM t -- correct
                    # WITH t AS (SELECT OBJECT_CONSTRUCT('x', 'y') AS c) SELECT c:X FROM t -- incorrect, returns NULL
                    field_type.this.replace(exp.to_identifier(field_type.name, quoted=True))

        return super().datatype_sql(expression)
 715
 716    def tonumber_sql(self, expression: exp.ToNumber) -> str:
 717        precision = expression.args.get("precision")
 718        scale = expression.args.get("scale")
 719
 720        default_precision = isinstance(precision, exp.Literal) and precision.name == "38"
 721        default_scale = isinstance(scale, exp.Literal) and scale.name == "0"
 722
 723        if default_precision and default_scale:
 724            precision = None
 725            scale = None
 726        elif default_scale:
 727            scale = None
 728
 729        func_name = "TRY_TO_NUMBER" if expression.args.get("safe") else "TO_NUMBER"
 730
 731        return self.func(
 732            func_name,
 733            expression.this,
 734            expression.args.get("format"),
 735            precision,
 736            scale,
 737        )
 738
 739    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
 740        milli = expression.args.get("milli")
 741        if milli is not None:
 742            milli_to_nano = milli.pop() * exp.Literal.number(1000000)
 743            expression.set("nano", milli_to_nano)
 744
 745        return rename_func("TIMESTAMP_FROM_PARTS")(self, expression)
 746
 747    def cast_sql(self, expression: exp.Cast, safe_prefix: str | None = None) -> str:
 748        if expression.is_type(exp.DType.GEOGRAPHY):
 749            return self.func("TO_GEOGRAPHY", expression.this)
 750        if expression.is_type(exp.DType.GEOMETRY):
 751            return self.func("TO_GEOMETRY", expression.this)
 752
 753        return super().cast_sql(expression, safe_prefix=safe_prefix)
 754
 755    def trycast_sql(self, expression: exp.TryCast) -> str:
 756        value = expression.this
 757
 758        if value.type is None:
 759            from sqlglot.optimizer.annotate_types import annotate_types
 760
 761            value = annotate_types(value, dialect=self.dialect)
 762
 763        # Snowflake requires that TRY_CAST's value be a string
 764        # If TRY_CAST is being roundtripped (since Snowflake is the only dialect that sets "requires_string") or
 765        # if we can deduce that the value is a string, then we can generate TRY_CAST
 766        if expression.args.get("requires_string") or value.is_type(*exp.DataType.TEXT_TYPES):
 767            return super().trycast_sql(expression)
 768
 769        return self.cast_sql(expression)
 770
 771    def log_sql(self, expression: exp.Log) -> str:
 772        if not expression.expression:
 773            return self.func("LN", expression.this)
 774
 775        return super().log_sql(expression)
 776
 777    def greatest_sql(self, expression: exp.Greatest) -> str:
 778        name = "GREATEST_IGNORE_NULLS" if expression.args.get("ignore_nulls") else "GREATEST"
 779        return self.func(name, expression.this, *expression.expressions)
 780
 781    def least_sql(self, expression: exp.Least) -> str:
 782        name = "LEAST_IGNORE_NULLS" if expression.args.get("ignore_nulls") else "LEAST"
 783        return self.func(name, expression.this, *expression.expressions)
 784
 785    def generator_sql(self, expression: exp.Generator) -> str:
 786        args = []
 787        rowcount = expression.args.get("rowcount")
 788        timelimit = expression.args.get("timelimit")
 789
 790        if rowcount:
 791            args.append(exp.Kwarg(this=exp.var("ROWCOUNT"), expression=rowcount))
 792        if timelimit:
 793            args.append(exp.Kwarg(this=exp.var("TIMELIMIT"), expression=timelimit))
 794
 795        return self.func("GENERATOR", *args)
 796
    def unnest_sql(self, expression: exp.Unnest) -> str:
        """Transpile UNNEST into Snowflake's (TABLE()FLATTEN(INPUT => ...)).

        Aliases all six columns emitted by FLATTEN (seq, key, path, index,
        value, this) so that references to the original UNNEST alias resolve
        to FLATTEN's VALUE column.
        """
        unnest_alias = expression.args.get("alias")
        offset = expression.args.get("offset")

        # The first user-provided alias column (if any) names the VALUE column.
        unnest_alias_columns = unnest_alias.columns if unnest_alias else []
        value = seq_get(unnest_alias_columns, 0) or exp.to_identifier("value")

        columns = [
            exp.to_identifier("seq"),
            exp.to_identifier("key"),
            exp.to_identifier("path"),
            # A WITH OFFSET alias maps onto FLATTEN's INDEX column.
            offset.pop() if isinstance(offset, exp.Expr) else exp.to_identifier("index"),
            value,
            exp.to_identifier("this"),
        ]

        if unnest_alias:
            unnest_alias.set("columns", columns)
        else:
            unnest_alias = exp.TableAlias(this="_u", columns=columns)

        table_input = self.sql(expression.expressions[0])
        if not table_input.startswith("INPUT =>"):
            table_input = f"INPUT => {table_input}"

        expression_parent = expression.parent

        # LATERAL FLATTEN(...) doesn't take the TABLE() wrapper; elsewhere it does.
        explode = (
            f"FLATTEN({table_input})"
            if isinstance(expression_parent, exp.Lateral)
            else f"TABLE(FLATTEN({table_input}))"
        )
        alias = self.sql(unnest_alias)
        alias = f" AS {alias}" if alias else ""
        # Outside FROM/JOIN/LATERAL contexts, emit a scalar subquery projection.
        value = (
            ""
            if isinstance(expression_parent, (exp.From, exp.Join, exp.Lateral))
            else f"{value} FROM "
        )

        return f"{value}{explode}{alias}"
 838
    def show_sql(self, expression: exp.Show) -> str:
        """Render a SHOW command with its optional modifiers.

        Clause order matches the f-string at the bottom: SHOW [TERSE] [ICEBERG]
        <kind> [HISTORY] [LIKE ...] [IN <scope_kind>] [<scope>] [STARTS WITH ...]
        [<limit>] [FROM ...] [WITH PRIVILEGES ...].
        """
        terse = "TERSE " if expression.args.get("terse") else ""
        iceberg = "ICEBERG " if expression.args.get("iceberg") else ""
        history = " HISTORY" if expression.args.get("history") else ""
        like = self.sql(expression, "like")
        like = f" LIKE {like}" if like else ""

        scope = self.sql(expression, "scope")
        scope = f" {scope}" if scope else ""

        scope_kind = self.sql(expression, "scope_kind")
        if scope_kind:
            scope_kind = f" IN {scope_kind}"

        starts_with = self.sql(expression, "starts_with")
        if starts_with:
            starts_with = f" STARTS WITH {starts_with}"

        limit = self.sql(expression, "limit")

        from_ = self.sql(expression, "from_")
        if from_:
            from_ = f" FROM {from_}"

        privileges = self.expressions(expression, key="privileges", flat=True)
        privileges = f" WITH PRIVILEGES {privileges}" if privileges else ""

        return f"SHOW {terse}{iceberg}{expression.name}{history}{like}{scope_kind}{scope}{starts_with}{limit}{from_}{privileges}"
 867
 868    def rowaccessproperty_sql(self, expression: exp.RowAccessProperty) -> str:
 869        if not expression.this:
 870            return "ROW ACCESS"
 871        on = f" ON ({self.expressions(expression, flat=True)})" if expression.expressions else ""
 872        return f"WITH ROW ACCESS POLICY {self.sql(expression, 'this')}{on}"
 873
 874    def describe_sql(self, expression: exp.Describe) -> str:
 875        kind_value = expression.args.get("kind") or "TABLE"
 876
 877        properties = expression.args.get("properties")
 878        if properties:
 879            qualifier = self.expressions(properties, sep=" ")
 880            kind = f" {qualifier} {kind_value}"
 881        else:
 882            kind = f" {kind_value}"
 883
 884        this = f" {self.sql(expression, 'this')}"
 885        expressions = self.expressions(expression, flat=True)
 886        expressions = f" {expressions}" if expressions else ""
 887        return f"DESCRIBE{kind}{this}{expressions}"
 888
 889    def generatedasidentitycolumnconstraint_sql(
 890        self, expression: exp.GeneratedAsIdentityColumnConstraint
 891    ) -> str:
 892        start = expression.args.get("start")
 893        start = f" START {start}" if start else ""
 894        increment = expression.args.get("increment")
 895        increment = f" INCREMENT {increment}" if increment else ""
 896
 897        order = expression.args.get("order")
 898        if order is not None:
 899            order_clause = " ORDER" if order else " NOORDER"
 900        else:
 901            order_clause = ""
 902
 903        return f"AUTOINCREMENT{start}{increment}{order_clause}"
 904
    def cluster_sql(self, expression: exp.Cluster) -> str:
        # Clustering keys are always wrapped in parentheses: CLUSTER BY (a, b).
        return f"CLUSTER BY ({self.expressions(expression, flat=True)})"
 907
 908    def struct_sql(self, expression: exp.Struct) -> str:
 909        if len(expression.expressions) == 1:
 910            arg = expression.expressions[0]
 911            if arg.is_star or (isinstance(arg, exp.ILike) and arg.left.is_star):
 912                # Wildcard syntax: https://docs.snowflake.com/en/sql-reference/data-types-semistructured#object
 913                return f"{{{self.sql(expression.expressions[0])}}}"
 914
 915        keys = []
 916        values = []
 917
 918        for i, e in enumerate(expression.expressions):
 919            if isinstance(e, exp.PropertyEQ):
 920                keys.append(
 921                    exp.Literal.string(e.name) if isinstance(e.this, exp.Identifier) else e.this
 922                )
 923                values.append(e.expression)
 924            else:
 925                keys.append(exp.Literal.string(f"_{i}"))
 926                values.append(e)
 927
 928        return self.func("OBJECT_CONSTRUCT", *flatten(zip(keys, values)))
 929
    @unsupported_args("weight", "accuracy")
    def approxquantile_sql(self, expression: exp.ApproxQuantile) -> str:
        # Only the two-argument APPROX_PERCENTILE form is generated; the
        # decorator warns about the unsupported "weight"/"accuracy" arguments.
        return self.func("APPROX_PERCENTILE", expression.this, expression.args.get("quantile"))
 933
 934    def alterset_sql(self, expression: exp.AlterSet) -> str:
 935        exprs = self.expressions(expression, flat=True)
 936        exprs = f" {exprs}" if exprs else ""
 937        file_format = self.expressions(expression, key="file_format", flat=True, sep=" ")
 938        file_format = f" STAGE_FILE_FORMAT = ({file_format})" if file_format else ""
 939        copy_options = self.expressions(expression, key="copy_options", flat=True, sep=" ")
 940        copy_options = f" STAGE_COPY_OPTIONS = ({copy_options})" if copy_options else ""
 941        tag = self.expressions(expression, key="tag", flat=True)
 942        tag = f" TAG {tag}" if tag else ""
 943
 944        return f"SET{exprs}{file_format}{copy_options}{tag}"
 945
    def strtotime_sql(self, expression: exp.StrToTime) -> str:
        """Render string-to-time conversion, picking the TO_* function that
        matches the target timestamp type (optionally TRY_-prefixed)."""
        # target_type is stored as a DataType instance
        target_type = expression.args.get("target_type")

        # Get the type enum from DataType instance or from type annotation
        if isinstance(target_type, exp.DataType):
            type_enum = target_type.this
        elif expression.type:
            type_enum = expression.type.this
        else:
            type_enum = exp.DType.TIMESTAMP

        # Map the type to its conversion function, defaulting to TO_TIMESTAMP.
        func_name = TIMESTAMP_TYPES.get(type_enum, "TO_TIMESTAMP")

        return self.func(
            f"{'TRY_' if expression.args.get('safe') else ''}{func_name}",
            expression.this,
            self.format_time(expression),
        )
 965
    def timestampsub_sql(self, expression: exp.TimestampSub) -> str:
        """Transpile TIMESTAMP subtraction as TIMESTAMPADD with a negated amount."""
        return self.sql(
            exp.TimestampAdd(
                this=expression.this,
                expression=expression.expression * -1,
                unit=expression.unit,
            )
        )
 974
    def jsonextract_sql(self, expression: exp.JSONExtract) -> str:
        """Render JSON extraction as GET_PATH, parsing raw JSON strings first."""
        this = expression.this

        # JSON strings are valid coming from other dialects such as BQ so
        # for these cases we PARSE_JSON preemptively
        if not isinstance(this, (exp.ParseJSON, exp.JSONExtract)) and not expression.args.get(
            "requires_json"
        ):
            this = exp.ParseJSON(this=this)

        return self.func(
            "GET_PATH",
            this,
            expression.expression,
        )
 990
 991    def timetostr_sql(self, expression: exp.TimeToStr) -> str:
 992        this = expression.this
 993        if this.is_string:
 994            this = exp.cast(this, exp.DType.TIMESTAMP)
 995
 996        return self.func("TO_CHAR", this, self.format_time(expression))
 997
    def datesub_sql(self, expression: exp.DateSub) -> str:
        """Transpile DATE subtraction as DATEADD with a negated interval count."""
        value = expression.expression
        if value:
            # Negate in place: replace() swaps the node within the tree so that
            # date_delta_sql sees the negated count.
            value.replace(value * (-1))
        else:
            self.unsupported("DateSub cannot be transpiled if the subtracted count is unknown")

        return date_delta_sql("DATEADD")(self, expression)
1006
1007    def select_sql(self, expression: exp.Select) -> str:
1008        limit = expression.args.get("limit")
1009        offset = expression.args.get("offset")
1010        if offset and not limit:
1011            expression.limit(exp.Null(), copy=False)
1012        return super().select_sql(expression)
1013
    def createable_sql(self, expression: exp.Create, locations: defaultdict) -> str:
        """Render the CREATE target, relocating COPY GRANTS before the column
        list for materialized views."""
        is_materialized = expression.find(exp.MaterializedProperty)
        copy_grants_property = expression.find(exp.CopyGrantsProperty)

        if expression.kind == "VIEW" and is_materialized and copy_grants_property:
            # For materialized views, COPY GRANTS is located *before* the columns list
            # This is in contrast to normal views where COPY GRANTS is located *after* the columns list
            # We default CopyGrantsProperty to POST_SCHEMA which means we need to output it POST_NAME if a materialized view is detected
            # ref: https://docs.snowflake.com/en/sql-reference/sql/create-materialized-view#syntax
            # ref: https://docs.snowflake.com/en/sql-reference/sql/create-view#syntax
            post_schema_properties = locations[exp.Properties.Location.POST_SCHEMA]
            # Remove it from POST_SCHEMA so it isn't rendered twice.
            post_schema_properties.pop(post_schema_properties.index(copy_grants_property))

            this_name = self.sql(expression.this, "this")
            copy_grants = self.sql(copy_grants_property)
            this_schema = self.schema_columns_sql(expression.this)
            this_schema = f"{self.sep()}{this_schema}" if this_schema else ""

            return f"{this_name}{self.sep()}{copy_grants}{this_schema}"

        return super().createable_sql(expression, locations)
1035
    def arrayagg_sql(self, expression: exp.ArrayAgg) -> str:
        """Render ARRAY_AGG, moving any ORDER BY into a WITHIN GROUP clause."""
        this = expression.this

        # If an ORDER BY clause is present, we need to remove it from ARRAY_AGG
        # and add it later as part of the WITHIN GROUP clause
        order = this if isinstance(this, exp.Order) else None
        if order:
            expression.set("this", order.this.pop())

        expr_sql = super().arrayagg_sql(expression)

        if order:
            expr_sql = self.sql(exp.WithinGroup(this=expr_sql, expression=order))

        return expr_sql
1051
    def arraytostring_sql(self, expression: exp.ArrayToString) -> str:
        # ARRAY_TO_STRING(array, separator)
        return self.func("ARRAY_TO_STRING", expression.this, expression.expression)
1054
    def array_sql(self, expression: exp.Array) -> str:
        """Render array literals, rewriting ARRAY(SELECT AS STRUCT ...) into an
        ARRAY_AGG(OBJECT_CONSTRUCT(...)) subquery; everything else is inlined."""
        expressions = expression.expressions

        first_expr = seq_get(expressions, 0)
        if isinstance(first_expr, exp.Select):
            # SELECT AS STRUCT foo AS alias_foo -> ARRAY_AGG(OBJECT_CONSTRUCT('alias_foo', foo))
            if first_expr.text("kind").upper() == "STRUCT":
                object_construct_args = []
                for expr in first_expr.expressions:
                    # Alias case: SELECT AS STRUCT foo AS alias_foo -> OBJECT_CONSTRUCT('alias_foo', foo)
                    # Column case: SELECT AS STRUCT foo -> OBJECT_CONSTRUCT('foo', foo)
                    name = expr.this if isinstance(expr, exp.Alias) else expr

                    object_construct_args.extend([exp.Literal.string(expr.alias_or_name), name])

                array_agg = exp.ArrayAgg(this=build_object_construct(args=object_construct_args))

                # Rewrite the SELECT in place: drop the STRUCT kind and project
                # only the aggregated object.
                first_expr.set("kind", None)
                first_expr.set("expressions", [array_agg])

                return self.sql(first_expr.subquery())

        return inline_array_sql(self, expression)
1078
1079    def currentdate_sql(self, expression: exp.CurrentDate) -> str:
1080        zone = self.sql(expression, "this")
1081        if not zone:
1082            return super().currentdate_sql(expression)
1083
1084        expr = exp.Cast(
1085            this=exp.ConvertTimezone(target_tz=zone, timestamp=exp.CurrentTimestamp()),
1086            to=exp.DataType(this=exp.DType.DATE),
1087        )
1088        return self.sql(expr)
1089
1090    def dot_sql(self, expression: exp.Dot) -> str:
1091        this = expression.this
1092
1093        if not this.type:
1094            from sqlglot.optimizer.annotate_types import annotate_types
1095
1096            this = annotate_types(this, dialect=self.dialect)
1097
1098        if not isinstance(this, exp.Dot) and this.is_type(exp.DType.STRUCT):
1099            # Generate colon notation for the top level STRUCT
1100            return f"{self.sql(this)}:{self.sql(expression, 'expression')}"
1101
1102        return super().dot_sql(expression)
1103
    def modelattribute_sql(self, expression: exp.ModelAttribute) -> str:
        # Model attribute access is joined with the "!" operator.
        return f"{self.sql(expression, 'this')}!{self.sql(expression, 'expression')}"
1106
1107    def format_sql(self, expression: exp.Format) -> str:
1108        if expression.name.lower() == "%s" and len(expression.expressions) == 1:
1109            return self.func("TO_CHAR", expression.expressions[0])
1110
1111        return self.function_fallback_sql(expression)
1112
1113    def splitpart_sql(self, expression: exp.SplitPart) -> str:
1114        # Set part_index to 1 if missing
1115        if not expression.args.get("delimiter"):
1116            expression.set("delimiter", exp.Literal.string(" "))
1117
1118        if not expression.args.get("part_index"):
1119            expression.set("part_index", exp.Literal.number(1))
1120
1121        return rename_func("SPLIT_PART")(self, expression)
1122
1123    def uniform_sql(self, expression: exp.Uniform) -> str:
1124        gen = expression.args.get("gen")
1125        seed = expression.args.get("seed")
1126
1127        # From Databricks UNIFORM(min, max, seed) -> Wrap gen in RANDOM(seed)
1128        if seed:
1129            gen = exp.Rand(this=seed)
1130
1131        # No gen argument (from Databricks 2-arg UNIFORM(min, max)) -> Add RANDOM()
1132        if not gen:
1133            gen = exp.Rand()
1134
1135        return self.func("UNIFORM", expression.this, expression.expression, gen)
1136
    def window_sql(self, expression: exp.Window) -> str:
        """Render a window expression, dropping an explicit
        ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING frame from
        ranking functions (see RANKING_WINDOW_FUNCTIONS_WITH_FRAME), for which
        it is treated as the default."""
        spec = expression.args.get("spec")
        this = expression.this

        if (
            (
                isinstance(this, RANKING_WINDOW_FUNCTIONS_WITH_FRAME)
                # The function may be wrapped in a RESPECT/IGNORE NULLS modifier.
                or (
                    isinstance(this, (exp.RespectNulls, exp.IgnoreNulls))
                    and isinstance(this.this, RANKING_WINDOW_FUNCTIONS_WITH_FRAME)
                )
            )
            and spec
            and (
                spec.text("kind").upper() == "ROWS"
                and spec.text("start").upper() == "UNBOUNDED"
                and spec.text("start_side").upper() == "PRECEDING"
                and spec.text("end").upper() == "UNBOUNDED"
                and spec.text("end_side").upper() == "FOLLOWING"
            )
        ):
            # omit the default window from window ranking functions
            expression.set("spec", None)
        return super().window_sql(expression)

Generator converts a given syntax tree to the corresponding SQL string.

Arguments:
  • pretty: Whether to format the produced SQL string. Default: False.
  • identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True: Always quote except for special cases. 'safe': Only quote identifiers that are case insensitive.
  • normalize: Whether to normalize identifiers to lowercase. Default: False.
  • pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
  • indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
  • normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
  • unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
  • max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
  • leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
  • max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
  • comments: Whether to preserve comments in the output SQL code. Default: True
SELECT_KINDS: tuple[str, ...] = ()
PARAMETER_TOKEN = '$'
MATCHED_BY_SOURCE = False
SINGLE_STRING_INTERVAL = True
JOIN_HINTS = False
TABLE_HINTS = False
QUERY_HINTS = False
AGGREGATE_FILTER_SUPPORTED = False
SUPPORTS_TABLE_COPY = False
COLLATE_IS_FUNC = True
LIMIT_ONLY_LITERALS = True
JSON_KEY_VALUE_PAIR_SEP = ','
INSERT_OVERWRITE = ' OVERWRITE INTO'
STRUCT_DELIMITER = ('(', ')')
COPY_PARAMS_ARE_WRAPPED = False
COPY_PARAMS_EQ_REQUIRED = True
STAR_EXCEPT = 'EXCLUDE'
SUPPORTS_EXPLODING_PROJECTIONS = False
ARRAY_CONCAT_IS_VAR_LEN = False
SUPPORTS_CONVERT_TIMEZONE = True
EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
SUPPORTS_MEDIAN = True
ARRAY_SIZE_NAME = 'ARRAY_SIZE'
SUPPORTS_DECODE_CASE = True
AFTER_HAVING_MODIFIER_TRANSFORMS = {'windows': <function <lambda>>, 'qualify': <function <lambda>>}
IS_BOOL_ALLOWED = False
DIRECTED_JOINS = True
SUPPORTS_UESCAPE = False
TRY_SUPPORTED = False
TRANSFORMS = {<class 'sqlglot.expressions.query.JSONPathKey'>: <function <lambda>>, <class 'sqlglot.expressions.query.JSONPathRoot'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.query.JSONPathSubscript'>: <function <lambda>>, <class 'sqlglot.expressions.core.Adjacent'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.AllowedValuesProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.AnalyzeColumns'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.AnalyzeWith'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.array.ArrayContainsAll'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.array.ArrayOverlaps'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.constraints.AssumeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.AutoRefreshProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.BackupProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.math.Ceil'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.ClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.functions.ConnectByRoot'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.string.ConvertToCharset'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 
'sqlglot.expressions.properties.CredentialsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.functions.CurrentCatalog'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.functions.SessionUser'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ApiProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ApplicationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.CatalogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ComputeProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.DatabaseProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.DynamicProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.EmptyProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.EndStatement'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.EnviromentProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.HandlerProperty'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.properties.ParameterStyleProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.EphemeralColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.ExcludeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Except'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ExternalProperty'>: <function Generator.<lambda>>, 
<class 'sqlglot.expressions.math.Floor'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Get'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.GlobalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.HeapProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.HybridProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.IcebergProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.InheritsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.InputModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Intersect'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.datatypes.IntervalSpan'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.functions.Int64'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.json.JSONBContainsAnyTopKeys'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.json.JSONBContainsAllTopKeys'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.json.JSONBDeleteAtPath'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.json.JSONObject'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.json.JSONObjectAgg'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.LocationProperty'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.properties.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.MaskingProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.functions.NetFunc'>: <function Generator.<lambda>>, 
<class 'sqlglot.expressions.properties.NetworkProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.NonClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.NotForReplicationColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.OnProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.core.Operator'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.OutputModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.core.ExtendsLeft'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.core.ExtendsRight'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.PartitionedByBucket'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.PartitionByTruncate'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.core.PivotAny'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.array.PositionalColumn'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.ProjectionPolicyColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.InvisibleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.ZeroFillColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Put'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.RemoteWithConnectionModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ReturnsProperty'>: <function 
Generator.<lambda>>, <class 'sqlglot.expressions.properties.RowAccessProperty'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.core.SafeFunc'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SampleProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SecureProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SecurityIntegrationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SetConfigProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SharingProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SqlReadWriteProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.StabilityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Stream'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.StreamingTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.StrictProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ddl.SwapTable'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.TableColumn'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.Tags'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.array.ToMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ToTableProperty'>: <function Generator.<lambda>>, <class 
'sqlglot.expressions.properties.TransformModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.VirtualProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ddl.TriggerExecute'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Union'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.UnloggedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.UsingTemplateProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.UsingData'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.temporal.UtcDate'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.temporal.UtcTime'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.temporal.UtcTimestamp'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Variadic'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.array.VarMap'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.properties.ViewAttributeProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.WithProcedureOptions'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.WithSchemaBindingProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.WithOperator'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ForceProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.core.ApproxDistinct'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.ArgMax'>: <function 
rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.ArgMin'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.array.Array'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.array.ArrayConcat'>: <function array_concat_sql.<locals>._array_concat_sql>, <class 'sqlglot.expressions.array.ArrayAppend'>: <function array_append_sql.<locals>._array_append_sql>, <class 'sqlglot.expressions.array.ArrayPrepend'>: <function array_append_sql.<locals>._array_append_sql>, <class 'sqlglot.expressions.array.ArrayContains'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.array.ArrayPosition'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.array.ArrayIntersect'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.AtTimeZone'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.core.BitwiseOr'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.BitwiseXor'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.BitwiseAnd'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.BitwiseAndAgg'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.BitwiseOrAgg'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.BitwiseXorAgg'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.BitwiseNot'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.BitwiseLeftShift'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.BitwiseRightShift'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ddl.Create'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.temporal.CurrentTimestamp'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.functions.CurrentSchemas'>: <function SnowflakeGenerator.<lambda>>, <class 
'sqlglot.expressions.temporal.Localtime'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.Localtimestamp'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.DateAdd'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.DateDiff'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.DatetimeAdd'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.DatetimeDiff'>: <function timestampdiff_sql>, <class 'sqlglot.expressions.temporal.DateStrToDate'>: <function datestrtodate_sql>, <class 'sqlglot.expressions.string.Decrypt'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.DecryptRaw'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.DayOfMonth'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.DayOfWeek'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.DayOfWeekIso'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.DayOfYear'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.DotProduct'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.array.Explode'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.Extract'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.math.CosineDistance'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.EuclideanDistance'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.properties.FileFormatProperty'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.core.FromTimeZone'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.array.GenerateSeries'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.GetExtract'>: <function 
rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.GroupConcat'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.functions.If'>: <function if_sql.<locals>._if_sql>, <class 'sqlglot.expressions.json.JSONArray'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.json.JSONExtractArray'>: <function _json_extract_value_array_sql>, <class 'sqlglot.expressions.json.JSONExtractScalar'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.json.JSONKeys'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.query.JSONValueArray'>: <function _json_extract_value_array_sql>, <class 'sqlglot.expressions.string.Levenshtein'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.LogicalAnd'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.LogicalOr'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.array.Map'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.math.ManhattanDistance'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.MakeInterval'>: <function no_make_interval_sql>, <class 'sqlglot.expressions.aggregate.Max'>: <function max_or_greatest>, <class 'sqlglot.expressions.aggregate.Min'>: <function min_or_least>, <class 'sqlglot.expressions.json.ParseJSON'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.ToBinary'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.functions.ToBoolean'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.ToDouble'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.ToFile'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.json.JSONFormat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.properties.PartitionedByProperty'>: <function SnowflakeGenerator.<lambda>>, <class 
'sqlglot.expressions.aggregate.PercentileCont'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.aggregate.PercentileDisc'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.query.Pivot'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.string.RegexpExtract'>: <function _regexpextract_sql>, <class 'sqlglot.expressions.string.RegexpExtractAll'>: <function _regexpextract_sql>, <class 'sqlglot.expressions.string.RegexpILike'>: <function _regexpilike_sql>, <class 'sqlglot.expressions.query.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.string.SHA'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.SHA1Digest'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.MD5Digest'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.MD5NumberLower64'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.MD5NumberUpper64'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.LowerHex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.Skewness'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.array.StarMap'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.StartsWith'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.EndsWith'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.functions.Rand'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.StrPosition'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.StrToDate'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.array.StringToArray'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.Stuff'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.array.StPoint'>: 
<function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.TimeAdd'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.TimeSlice'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.Timestamp'>: <function no_timestamp_sql>, <class 'sqlglot.expressions.temporal.TimestampAdd'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.TimestampDiff'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.TimestampTrunc'>: <function timestamptrunc_sql.<locals>._timestamptrunc_sql>, <class 'sqlglot.expressions.temporal.TimeStrToTime'>: <function timestrtotime_sql>, <class 'sqlglot.expressions.temporal.TimeToUnix'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.array.ToArray'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.ToChar'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.TsOrDsAdd'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.TsOrDsDiff'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.TsOrDsToDate'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.TsOrDsToTime'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.Unhex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.UnixToTime'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.functions.Uuid'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.Booland'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.Boolor'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.WeekOfYear'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.YearOfWeek'>: <function rename_func.<locals>.<lambda>>, <class 
'sqlglot.expressions.temporal.YearOfWeekIso'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.Xor'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.ByteLength'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.array.Flatten'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.ArrayConcatAgg'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.SHA2Digest'>: <function SnowflakeGenerator.<lambda>>}
def sortarray_sql(self, expression: exp.SortArray) -> str:
    """Render a SortArray node as Snowflake's ARRAY_SORT(<array>[, <asc>[, <nulls_first>]])."""
    sort_asc = expression.args.get("asc")
    nulls_lead = expression.args.get("nulls_first")

    # For a descending sort with NULLs first, the explicit flag is dropped —
    # presumably because that matches ARRAY_SORT's behavior for DESC; confirm
    # against the Snowflake ARRAY_SORT docs.
    if sort_asc == exp.false() and nulls_lead == exp.true():
        nulls_lead = None

    return self.func("ARRAY_SORT", expression.this, sort_asc, nulls_lead)
def nthvalue_sql(self, expression: exp.NthValue) -> str:
    """Render NthValue as NTH_VALUE(<expr>, <offset>) [FROM FIRST | FROM LAST]."""
    sql = self.func("NTH_VALUE", expression.this, expression.args.get("offset"))

    from_first = expression.args.get("from_first")
    if from_first is None:
        # No FROM clause requested — emit the bare function call.
        return sql

    return sql + (" FROM FIRST" if from_first else " FROM LAST")
TYPE_MAPPING = {<DType.DATETIME2: 'DATETIME2'>: 'TIMESTAMP', <DType.NCHAR: 'NCHAR'>: 'CHAR', <DType.NVARCHAR: 'NVARCHAR'>: 'VARCHAR', <DType.MEDIUMTEXT: 'MEDIUMTEXT'>: 'TEXT', <DType.LONGTEXT: 'LONGTEXT'>: 'TEXT', <DType.TINYTEXT: 'TINYTEXT'>: 'TEXT', <DType.BLOB: 'BLOB'>: 'VARBINARY', <DType.MEDIUMBLOB: 'MEDIUMBLOB'>: 'BLOB', <DType.LONGBLOB: 'LONGBLOB'>: 'BLOB', <DType.TINYBLOB: 'TINYBLOB'>: 'BLOB', <DType.INET: 'INET'>: 'INET', <DType.ROWVERSION: 'ROWVERSION'>: 'VARBINARY', <DType.SMALLDATETIME: 'SMALLDATETIME'>: 'TIMESTAMP', <DType.BIGDECIMAL: 'BIGDECIMAL'>: 'DOUBLE', <DType.JSON: 'JSON'>: 'VARIANT', <DType.NESTED: 'NESTED'>: 'OBJECT', <DType.STRUCT: 'STRUCT'>: 'OBJECT', <DType.TEXT: 'TEXT'>: 'VARCHAR'}
TOKEN_MAPPING = {<TokenType.AUTO_INCREMENT: 226>: 'AUTOINCREMENT'}
PROPERTIES_LOCATION = {<class 'sqlglot.expressions.properties.AllowedValuesProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.AlgorithmProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.ApiProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.ApplicationProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.AutoIncrementProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.AutoRefreshProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.BackupProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.BlockCompressionProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.CatalogProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.CharacterSetProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ChecksumProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.CollateProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ComputeProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.CopyGrantsProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.query.Cluster'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ClusteredByProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.DistributedByProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.DuplicateKeyProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 
'sqlglot.expressions.properties.DataBlocksizeProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.DatabaseProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.DataDeletionProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.DefinerProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.DictRange'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.DictProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.DynamicProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.DistKeyProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.DistStyleProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.EmptyProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.EncodeProperty'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.properties.EngineProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.EnviromentProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.HandlerProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ParameterStyleProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ExecuteAsProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ExternalProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.FallbackProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.FileFormatProperty'>: 
<PropertiesLocation.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.properties.FreespaceProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.GlobalProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.HeapProperty'>: <PropertiesLocation.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.properties.HybridProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.InheritsProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.IcebergProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.IncludeProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.InputModelProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.IsolatedLoadingProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.JournalProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.LanguageProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.LikeProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.LocationProperty'>: <PropertiesLocation.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.properties.LockProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.LockingProperty'>: <PropertiesLocation.POST_ALIAS: 'POST_ALIAS'>, <class 'sqlglot.expressions.properties.LogProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.MaskingProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.MaterializedProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 
'sqlglot.expressions.properties.MergeBlockRatioProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.ModuleProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.NetworkProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.NoPrimaryIndexProperty'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.properties.OnProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.OnCommitProperty'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.query.Order'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.OutputModelProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.PartitionedByProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.PartitionedOfProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.constraints.PrimaryKey'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.Property'>: <PropertiesLocation.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.properties.RefreshTriggerProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.RemoteWithConnectionModelProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ReturnsProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.RollupProperty'>: <PropertiesLocation.UNSUPPORTED: 'UNSUPPORTED'>, <class 'sqlglot.expressions.properties.RowAccessProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.RowFormatProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 
'sqlglot.expressions.properties.RowFormatDelimitedProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.RowFormatSerdeProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SampleProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SchemaCommentProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SecureProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.SecurityIntegrationProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.SerdeProperties'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ddl.Set'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SettingsProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SetProperty'>: <PropertiesLocation.UNSUPPORTED: 'UNSUPPORTED'>, <class 'sqlglot.expressions.properties.SetConfigProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SharingProperty'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.ddl.SequenceProperties'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.ddl.TriggerProperties'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.properties.SortKeyProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SqlReadWriteProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SqlSecurityProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.StabilityProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 
'sqlglot.expressions.properties.StorageHandlerProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.StreamingTableProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.StrictProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.Tags'>: <PropertiesLocation.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.properties.TemporaryProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.ToTableProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.TransientProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.TransformModelProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ddl.MergeTreeTTL'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.UnloggedProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.UsingProperty'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.properties.UsingTemplateProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ViewAttributeProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.VirtualProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.VolatileProperty'>: <PropertiesLocation.UNSUPPORTED: 'UNSUPPORTED'>, <class 'sqlglot.expressions.properties.WithDataProperty'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.properties.WithJournalTableProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.WithProcedureOptions'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 
'sqlglot.expressions.properties.WithSchemaBindingProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.WithSystemVersioningProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ForceProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.CredentialsProperty'>: <PropertiesLocation.POST_WITH: 'POST_WITH'>}
RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS = (<class 'sqlglot.expressions.aggregate.ArrayAgg'>,)
def with_properties(self, properties: exp.Properties) -> str:
    """Render properties unwrapped and space-separated, prefixed by the generator's separator."""
    leading = self.sep("")
    return self.properties(properties, wrapped=False, prefix=leading, sep=" ")
def values_sql(self, expression: exp.Values, values_as_table: bool = True) -> str:
    """Render VALUES, falling back to the non-table form when the expression
    contains a construct Snowflake can't produce inside a VALUES table."""
    blocked = expression.find(*self.UNSUPPORTED_VALUES_EXPRESSIONS)
    if blocked:
        values_as_table = False

    return super().values_sql(expression, values_as_table=values_as_table)
def datatype_sql(self, expression: exp.DataType) -> str:
    """Render a data type, handling VECTOR element types and STRUCT/OBJECT keys."""
    # VECTOR only accepts FLOAT (not DOUBLE), INT, and STRING as element types,
    # so preserve FLOAT instead of mapping it to its synonym DOUBLE.
    # https://docs.snowflake.com/en/sql-reference/data-types-vector
    parent = expression.parent
    if (
        expression.is_type(exp.DType.DOUBLE)
        and isinstance(parent, exp.DataType)
        and parent.is_type(exp.DType.VECTOR)
    ):
        return "FLOAT"

    fields = expression.expressions
    if fields and expression.is_type(*exp.DataType.STRUCT_TYPES):
        for field in fields:
            # The correct syntax is OBJECT [ (<key> <value_type [NOT NULL] [, ...]) ];
            # a bare nested DataType can't carry a key, so emit untyped OBJECT.
            if isinstance(field, exp.DataType):
                return "OBJECT"
            if isinstance(field, exp.ColumnDef) and field.this and field.this.is_string:
                # OBJECT('foo' VARCHAR) is invalid Snowflake syntax. Besides turning
                # 'foo' into an identifier, we must quote it because object keys are
                # case-sensitive. For example:
                #
                # WITH t AS (SELECT OBJECT_CONSTRUCT('x', 'y') AS c) SELECT c:x FROM t -- correct
                # WITH t AS (SELECT OBJECT_CONSTRUCT('x', 'y') AS c) SELECT c:X FROM t -- incorrect, returns NULL
                field.this.replace(exp.to_identifier(field.name, quoted=True))

    return super().datatype_sql(expression)
def tonumber_sql(self, expression: exp.ToNumber) -> str:
    """Render TO_NUMBER / TRY_TO_NUMBER, omitting Snowflake's default precision/scale."""
    precision = expression.args.get("precision")
    scale = expression.args.get("scale")

    # Snowflake defaults are precision 38 and scale 0; drop them when explicit.
    has_default_precision = isinstance(precision, exp.Literal) and precision.name == "38"
    has_default_scale = isinstance(scale, exp.Literal) and scale.name == "0"

    if has_default_scale:
        scale = None
        # Precision can only be omitted when scale is also omitted.
        if has_default_precision:
            precision = None

    name = "TRY_TO_NUMBER" if expression.args.get("safe") else "TO_NUMBER"
    return self.func(
        name,
        expression.this,
        expression.args.get("format"),
        precision,
        scale,
    )
def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
    """Render TIMESTAMP_FROM_PARTS, folding a milliseconds argument into nanos."""
    milli = expression.args.get("milli")
    if milli is not None:
        # Snowflake has no milliseconds slot: 1 ms == 1_000_000 ns.
        expression.set("nano", milli.pop() * exp.Literal.number(1000000))

    return rename_func("TIMESTAMP_FROM_PARTS")(self, expression)
def cast_sql(self, expression: exp.Cast, safe_prefix: str | None = None) -> str:
    """Render CAST, using Snowflake's dedicated geospatial conversion functions."""
    for dtype, func_name in (
        (exp.DType.GEOGRAPHY, "TO_GEOGRAPHY"),
        (exp.DType.GEOMETRY, "TO_GEOMETRY"),
    ):
        if expression.is_type(dtype):
            return self.func(func_name, expression.this)

    return super().cast_sql(expression, safe_prefix=safe_prefix)
def trycast_sql(self, expression: exp.TryCast) -> str:
    """Render TRY_CAST, degrading to CAST when the input isn't a string."""
    value = expression.this

    if value.type is None:
        from sqlglot.optimizer.annotate_types import annotate_types

        value = annotate_types(value, dialect=self.dialect)

    # Snowflake requires that TRY_CAST's value be a string. Keep TRY_CAST when
    # roundtripping (only Snowflake sets "requires_string") or when the value is
    # provably textual; otherwise fall back to a plain CAST.
    is_textual = value.is_type(*exp.DataType.TEXT_TYPES)
    if expression.args.get("requires_string") or is_textual:
        return super().trycast_sql(expression)

    return self.cast_sql(expression)
def log_sql(self, expression: exp.Log) -> str:
    """Render LOG; the single-argument form is the natural log, spelled LN."""
    if expression.expression:
        return super().log_sql(expression)

    return self.func("LN", expression.this)
def greatest_sql(self, expression: exp.Greatest) -> str:
    """Render GREATEST, using the *_IGNORE_NULLS variant when requested."""
    ignore_nulls = expression.args.get("ignore_nulls")
    func_name = "GREATEST_IGNORE_NULLS" if ignore_nulls else "GREATEST"
    return self.func(func_name, expression.this, *expression.expressions)
def least_sql(self, expression: exp.Least) -> str:
    """Render LEAST, using the *_IGNORE_NULLS variant when requested."""
    ignore_nulls = expression.args.get("ignore_nulls")
    func_name = "LEAST_IGNORE_NULLS" if ignore_nulls else "LEAST"
    return self.func(func_name, expression.this, *expression.expressions)
def generator_sql(self, expression: exp.Generator) -> str:
    """Render GENERATOR(ROWCOUNT => ..., TIMELIMIT => ...) with keyword args."""
    kwargs = []
    # Emit keyword arguments in Snowflake's conventional order.
    for keyword in ("ROWCOUNT", "TIMELIMIT"):
        value = expression.args.get(keyword.lower())
        if value:
            kwargs.append(exp.Kwarg(this=exp.var(keyword), expression=value))

    return self.func("GENERATOR", *kwargs)
def unnest_sql(self, expression: exp.Unnest) -> str:
    """Render UNNEST via Snowflake's FLATTEN table function."""
    unnest_alias = expression.args.get("alias")
    offset = expression.args.get("offset")

    aliased_columns = unnest_alias.columns if unnest_alias else []
    value = seq_get(aliased_columns, 0) or exp.to_identifier("value")

    # FLATTEN exposes a fixed set of output columns; map the alias onto them,
    # reusing an explicit offset expression for the "index" column if given.
    columns = [
        exp.to_identifier("seq"),
        exp.to_identifier("key"),
        exp.to_identifier("path"),
        offset.pop() if isinstance(offset, exp.Expr) else exp.to_identifier("index"),
        value,
        exp.to_identifier("this"),
    ]

    if unnest_alias:
        unnest_alias.set("columns", columns)
    else:
        unnest_alias = exp.TableAlias(this="_u", columns=columns)

    table_input = self.sql(expression.expressions[0])
    if not table_input.startswith("INPUT =>"):
        table_input = f"INPUT => {table_input}"

    parent = expression.parent

    # Inside a LATERAL, FLATTEN doesn't need the TABLE(...) wrapper.
    if isinstance(parent, exp.Lateral):
        explode = f"FLATTEN({table_input})"
    else:
        explode = f"TABLE(FLATTEN({table_input}))"

    alias = self.sql(unnest_alias)
    alias = f" AS {alias}" if alias else ""

    # In FROM/JOIN/LATERAL position the surrounding clause already provides the
    # projection; otherwise select the value column explicitly.
    if isinstance(parent, (exp.From, exp.Join, exp.Lateral)):
        value = ""
    else:
        value = f"{value} FROM "

    return f"{value}{explode}{alias}"
def show_sql(self, expression: exp.Show) -> str:
    """Render a Snowflake SHOW command with all of its optional clauses."""
    args = expression.args

    terse = "TERSE " if args.get("terse") else ""
    iceberg = "ICEBERG " if args.get("iceberg") else ""
    history = " HISTORY" if args.get("history") else ""

    like = self.sql(expression, "like")
    if like:
        like = f" LIKE {like}"

    scope = self.sql(expression, "scope")
    if scope:
        scope = f" {scope}"

    scope_kind = self.sql(expression, "scope_kind")
    scope_kind = f" IN {scope_kind}" if scope_kind else ""

    starts_with = self.sql(expression, "starts_with")
    starts_with = f" STARTS WITH {starts_with}" if starts_with else ""

    limit = self.sql(expression, "limit")

    from_ = self.sql(expression, "from_")
    from_ = f" FROM {from_}" if from_ else ""

    privileges = self.expressions(expression, key="privileges", flat=True)
    privileges = f" WITH PRIVILEGES {privileges}" if privileges else ""

    return f"SHOW {terse}{iceberg}{expression.name}{history}{like}{scope_kind}{scope}{starts_with}{limit}{from_}{privileges}"
def rowaccessproperty_sql(self, expression: exp.RowAccessProperty) -> str:
    """Render a row access policy property."""
    # Bare form (no policy name attached).
    if not expression.this:
        return "ROW ACCESS"

    on = ""
    if expression.expressions:
        on = f" ON ({self.expressions(expression, flat=True)})"
    return f"WITH ROW ACCESS POLICY {self.sql(expression, 'this')}{on}"
def describe_sql(self, expression: exp.Describe) -> str:
    """Render DESCRIBE, defaulting the object kind to TABLE."""
    kind_value = expression.args.get("kind") or "TABLE"

    properties = expression.args.get("properties")
    if properties:
        # Qualifiers (e.g. modifiers parsed as properties) precede the kind.
        kind = f" {self.expressions(properties, sep=' ')} {kind_value}"
    else:
        kind = f" {kind_value}"

    this = f" {self.sql(expression, 'this')}"
    exprs = self.expressions(expression, flat=True)
    exprs = f" {exprs}" if exprs else ""
    return f"DESCRIBE{kind}{this}{exprs}"
def generatedasidentitycolumnconstraint_sql(
    self, expression: exp.GeneratedAsIdentityColumnConstraint
) -> str:
    """Render an identity column as AUTOINCREMENT with optional START/INCREMENT/ORDER."""
    args = expression.args

    start = args.get("start")
    start_sql = f" START {start}" if start else ""

    increment = args.get("increment")
    increment_sql = f" INCREMENT {increment}" if increment else ""

    # Tri-state: True -> ORDER, False -> NOORDER, None -> omitted entirely.
    order = args.get("order")
    order_sql = "" if order is None else (" ORDER" if order else " NOORDER")

    return f"AUTOINCREMENT{start_sql}{increment_sql}{order_sql}"
def cluster_sql(self, expression: exp.Cluster) -> str:
    """Render CLUSTER BY; Snowflake always parenthesizes the key list."""
    keys = self.expressions(expression, flat=True)
    return f"CLUSTER BY ({keys})"
def struct_sql(self, expression: exp.Struct) -> str:
    """Render a struct literal as OBJECT_CONSTRUCT (or brace wildcard syntax)."""
    exprs = expression.expressions

    if len(exprs) == 1:
        only = exprs[0]
        # Wildcard syntax: https://docs.snowflake.com/en/sql-reference/data-types-semistructured#object
        if only.is_star or (isinstance(only, exp.ILike) and only.left.is_star):
            return f"{{{self.sql(exprs[0])}}}"

    # Interleave key/value arguments; unnamed positions get synthetic "_<i>" keys.
    args = []
    for i, e in enumerate(exprs):
        if isinstance(e, exp.PropertyEQ):
            key = exp.Literal.string(e.name) if isinstance(e.this, exp.Identifier) else e.this
            args.extend((key, e.expression))
        else:
            args.extend((exp.Literal.string(f"_{i}"), e))

    return self.func("OBJECT_CONSTRUCT", *args)
@unsupported_args("weight", "accuracy")
def approxquantile_sql(self, expression: exp.ApproxQuantile) -> str:
    """Render APPROX_QUANTILE as Snowflake's APPROX_PERCENTILE."""
    quantile = expression.args.get("quantile")
    return self.func("APPROX_PERCENTILE", expression.this, quantile)
def alterset_sql(self, expression: exp.AlterSet) -> str:
    """Render ALTER ... SET with Snowflake stage file-format/copy-option/tag clauses."""
    segments = []

    exprs = self.expressions(expression, flat=True)
    if exprs:
        segments.append(f" {exprs}")

    file_format = self.expressions(expression, key="file_format", flat=True, sep=" ")
    if file_format:
        segments.append(f" STAGE_FILE_FORMAT = ({file_format})")

    copy_options = self.expressions(expression, key="copy_options", flat=True, sep=" ")
    if copy_options:
        segments.append(f" STAGE_COPY_OPTIONS = ({copy_options})")

    tag = self.expressions(expression, key="tag", flat=True)
    if tag:
        segments.append(f" TAG {tag}")

    return "SET" + "".join(segments)
def strtotime_sql(self, expression: exp.StrToTime):
    """Render STR_TO_TIME as the TO_* conversion matching the target type."""
    # target_type is stored as a DataType instance when the parser captured one;
    # otherwise fall back to the annotated type, then to TIMESTAMP.
    target_type = expression.args.get("target_type")
    if isinstance(target_type, exp.DataType):
        type_enum = target_type.this
    elif expression.type:
        type_enum = expression.type.this
    else:
        type_enum = exp.DType.TIMESTAMP

    base_name = TIMESTAMP_TYPES.get(type_enum, "TO_TIMESTAMP")
    prefix = "TRY_" if expression.args.get("safe") else ""

    return self.func(
        f"{prefix}{base_name}",
        expression.this,
        self.format_time(expression),
    )
def timestampsub_sql(self, expression: exp.TimestampSub):
    """Render TIMESTAMP subtraction as TIMESTAMPADD with a negated amount."""
    negated = exp.TimestampAdd(
        this=expression.this,
        expression=expression.expression * -1,
        unit=expression.unit,
    )
    return self.sql(negated)
def jsonextract_sql(self, expression: exp.JSONExtract):
    """Render JSON extraction as GET_PATH, parsing string input first."""
    this = expression.this

    # JSON strings are valid coming from other dialects such as BQ, so wrap
    # anything not already JSON-producing in PARSE_JSON preemptively.
    already_json = isinstance(this, (exp.ParseJSON, exp.JSONExtract))
    if not already_json and not expression.args.get("requires_json"):
        this = exp.ParseJSON(this=this)

    return self.func("GET_PATH", this, expression.expression)
def timetostr_sql(self, expression: exp.TimeToStr) -> str:
    """Render time-to-string formatting as TO_CHAR."""
    source = expression.this
    # TO_CHAR needs a temporal input: promote literal strings to TIMESTAMP.
    if source.is_string:
        source = exp.cast(source, exp.DType.TIMESTAMP)

    return self.func("TO_CHAR", source, self.format_time(expression))
def datesub_sql(self, expression: exp.DateSub) -> str:
    """Render date subtraction as DATEADD with a negated amount."""
    amount = expression.expression
    if not amount:
        self.unsupported("DateSub cannot be transpiled if the subtracted count is unknown")
    else:
        amount.replace(amount * (-1))

    return date_delta_sql("DATEADD")(self, expression)
def select_sql(self, expression: exp.Select) -> str:
    """Render SELECT; Snowflake requires a LIMIT whenever OFFSET is present."""
    # LIMIT NULL means "no limit" in Snowflake, so it's a safe placeholder.
    if expression.args.get("offset") and not expression.args.get("limit"):
        expression.limit(exp.Null(), copy=False)
    return super().select_sql(expression)
def createable_sql(self, expression: exp.Create, locations: defaultdict) -> str:
    """Render the creatable part of CREATE, repositioning COPY GRANTS for
    materialized views."""
    is_materialized = expression.find(exp.MaterializedProperty)
    copy_grants_property = expression.find(exp.CopyGrantsProperty)

    if not (expression.kind == "VIEW" and is_materialized and copy_grants_property):
        return super().createable_sql(expression, locations)

    # For materialized views, COPY GRANTS is located *before* the columns list,
    # in contrast to normal views where it is located *after* the columns list.
    # CopyGrantsProperty defaults to POST_SCHEMA, so we relocate it to POST_NAME
    # position when a materialized view is detected.
    # ref: https://docs.snowflake.com/en/sql-reference/sql/create-materialized-view#syntax
    # ref: https://docs.snowflake.com/en/sql-reference/sql/create-view#syntax
    post_schema_properties = locations[exp.Properties.Location.POST_SCHEMA]
    post_schema_properties.pop(post_schema_properties.index(copy_grants_property))

    this_name = self.sql(expression.this, "this")
    copy_grants = self.sql(copy_grants_property)
    this_schema = self.schema_columns_sql(expression.this)
    this_schema = f"{self.sep()}{this_schema}" if this_schema else ""

    return f"{this_name}{self.sep()}{copy_grants}{this_schema}"
def arrayagg_sql(self, expression: exp.ArrayAgg) -> str:
    """Render ARRAY_AGG, moving any ORDER BY into a WITHIN GROUP clause."""
    this = expression.this

    # Snowflake expresses ordered aggregation as WITHIN GROUP (ORDER BY ...),
    # so detach the ORDER BY from the argument and re-attach it afterwards.
    order = this if isinstance(this, exp.Order) else None
    if order:
        expression.set("this", order.this.pop())

    rendered = super().arrayagg_sql(expression)

    if order:
        rendered = self.sql(exp.WithinGroup(this=rendered, expression=order))

    return rendered
def arraytostring_sql(self, expression: exp.ArrayToString) -> str:
    """Render ARRAY_TO_STRING(array, separator)."""
    return self.func("ARRAY_TO_STRING", expression.this, expression.expression)
def array_sql(self, expression: exp.Array) -> str:
    """Render an array literal, converting SELECT AS STRUCT subqueries to
    ARRAY_AGG over OBJECT_CONSTRUCT."""
    exprs = expression.expressions
    first = seq_get(exprs, 0)

    # SELECT AS STRUCT foo AS alias_foo -> ARRAY_AGG(OBJECT_CONSTRUCT('alias_foo', foo))
    if isinstance(first, exp.Select) and first.text("kind").upper() == "STRUCT":
        construct_args = []
        for proj in first.expressions:
            # Alias case: SELECT AS STRUCT foo AS alias_foo -> OBJECT_CONSTRUCT('alias_foo', foo)
            # Column case: SELECT AS STRUCT foo -> OBJECT_CONSTRUCT('foo', foo)
            target = proj.this if isinstance(proj, exp.Alias) else proj
            construct_args.extend([exp.Literal.string(proj.alias_or_name), target])

        array_agg = exp.ArrayAgg(this=build_object_construct(args=construct_args))

        first.set("kind", None)
        first.set("expressions", [array_agg])

        return self.sql(first.subquery())

    return inline_array_sql(self, expression)
def currentdate_sql(self, expression: exp.CurrentDate) -> str:
    """Render CURRENT_DATE; a timezone argument becomes a CONVERT_TIMEZONE cast."""
    zone = self.sql(expression, "this")
    if not zone:
        return super().currentdate_sql(expression)

    # CURRENT_DATE(tz) -> CAST(CONVERT_TIMEZONE(tz, CURRENT_TIMESTAMP()) AS DATE)
    converted = exp.ConvertTimezone(target_tz=zone, timestamp=exp.CurrentTimestamp())
    return self.sql(exp.Cast(this=converted, to=exp.DataType(this=exp.DType.DATE)))
def dot_sql(self, expression: exp.Dot) -> str:
    """Render dot access, using colon notation for top-level STRUCT access."""
    this = expression.this

    if not this.type:
        from sqlglot.optimizer.annotate_types import annotate_types

        this = annotate_types(this, dialect=self.dialect)

    # Only the top level of a STRUCT path uses colon notation (a:b).
    if this.is_type(exp.DType.STRUCT) and not isinstance(this, exp.Dot):
        return f"{self.sql(this)}:{self.sql(expression, 'expression')}"

    return super().dot_sql(expression)
def modelattribute_sql(self, expression: exp.ModelAttribute) -> str:
    """Render model attribute access with bang notation: model!attribute."""
    owner = self.sql(expression, 'this')
    attribute = self.sql(expression, 'expression')
    return f"{owner}!{attribute}"
def format_sql(self, expression: exp.Format) -> str:
    """Render FORMAT; the single '%s' pattern is just a string conversion."""
    if len(expression.expressions) == 1 and expression.name.lower() == "%s":
        return self.func("TO_CHAR", expression.expressions[0])

    return self.function_fallback_sql(expression)
def splitpart_sql(self, expression: exp.SplitPart) -> str:
    """Render SPLIT_PART, defaulting the delimiter to ' ' and part_index to 1."""
    defaults = (
        ("delimiter", exp.Literal.string(" ")),
        ("part_index", exp.Literal.number(1)),
    )
    for key, default in defaults:
        if not expression.args.get(key):
            expression.set(key, default)

    return rename_func("SPLIT_PART")(self, expression)
def uniform_sql(self, expression: exp.Uniform) -> str:
    """Render UNIFORM(min, max, gen), synthesizing the generator when needed."""
    seed = expression.args.get("seed")

    if seed:
        # From Databricks UNIFORM(min, max, seed) -> wrap the seed in RANDOM(seed)
        gen = exp.Rand(this=seed)
    else:
        # Keep any explicit gen; Databricks' 2-arg UNIFORM(min, max) gets RANDOM()
        gen = expression.args.get("gen") or exp.Rand()

    return self.func("UNIFORM", expression.this, expression.expression, gen)
def window_sql(self, expression: exp.Window) -> str:
    """Render a window, dropping the default frame from ranking functions."""
    spec = expression.args.get("spec")
    this = expression.this

    # Unwrap RESPECT/IGNORE NULLS to find the actual window function.
    func = this
    if isinstance(func, (exp.RespectNulls, exp.IgnoreNulls)):
        func = func.this

    is_ranking = isinstance(func, RANKING_WINDOW_FUNCTIONS_WITH_FRAME)

    is_default_frame = bool(spec) and (
        spec.text("kind").upper() == "ROWS"
        and spec.text("start").upper() == "UNBOUNDED"
        and spec.text("start_side").upper() == "PRECEDING"
        and spec.text("end").upper() == "UNBOUNDED"
        and spec.text("end_side").upper() == "FOLLOWING"
    )

    if is_ranking and is_default_frame:
        # Omit the default ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED
        # FOLLOWING frame from window ranking functions.
        expression.set("spec", None)

    return super().window_sql(expression)
Inherited Members
sqlglot.generator.Generator
Generator
NULL_ORDERING_SUPPORTED
WINDOW_FUNCS_WITH_NULL_ORDERING
IGNORE_NULLS_IN_FUNC
IGNORE_NULLS_BEFORE_ORDER
LOCKING_READS_SUPPORTED
WRAP_DERIVED_VALUES
CREATE_FUNCTION_RETURN_AS
SUPPORTS_MERGE_WHERE
INTERVAL_ALLOWS_PLURAL_FORM
LIMIT_FETCH
RENAME_TABLE_WITH_DB
GROUPINGS_SEP
INDEX_ON
INOUT_SEPARATOR
QUERY_HINT_SEP
DUPLICATE_KEY_UPDATE_WITH_SET
LIMIT_IS_TOP
RETURNING_END
EXTRACT_ALLOWS_QUOTES
TZ_TO_WITH_TIME_ZONE
NVL2_SUPPORTED
VALUES_AS_TABLE
ALTER_TABLE_INCLUDE_COLUMN_KEYWORD
UNNEST_WITH_ORDINALITY
SEMI_ANTI_JOIN_WITH_SIDE
COMPUTED_COLUMN_WITH_TYPE
TABLESAMPLE_REQUIRES_PARENS
TABLESAMPLE_SIZE_IS_ROWS
TABLESAMPLE_KEYWORDS
TABLESAMPLE_WITH_METHOD
TABLESAMPLE_SEED_KEYWORD
DATA_TYPE_SPECIFIERS_ALLOWED
ENSURE_BOOLS
CTE_RECURSIVE_KEYWORD_REQUIRED
SUPPORTS_SINGLE_ARG_CONCAT
LAST_DAY_SUPPORTS_DATE_PART
SUPPORTS_TABLE_ALIAS_COLUMNS
UNPIVOT_ALIASES_ARE_IDENTIFIERS
SUPPORTS_SELECT_INTO
SUPPORTS_UNLOGGED_TABLES
SUPPORTS_CREATE_TABLE_LIKE
LIKE_PROPERTY_INSIDE_SCHEMA
MULTI_ARG_DISTINCT
JSON_TYPE_REQUIRED_FOR_EXTRACTION
JSON_PATH_BRACKETED_KEY_SUPPORTED
JSON_PATH_SINGLE_QUOTE_ESCAPE
CAN_IMPLEMENT_ARRAY_ANY
SUPPORTS_TO_NUMBER
SUPPORTS_WINDOW_EXCLUDE
SET_OP_MODIFIERS
COPY_HAS_INTO_KEYWORD
UNICODE_SUBSTITUTE
HEX_FUNC
WITH_PROPERTIES_PREFIX
QUOTE_JSON_PATH
PAD_FILL_PATTERN_IS_REQUIRED
SUPPORTS_UNIX_SECONDS
ALTER_SET_WRAPPED
NORMALIZE_EXTRACT_DATE_PARTS
PARSE_JSON_NAME
ALTER_SET_TYPE
ARRAY_SIZE_DIM_REQUIRED
SUPPORTS_BETWEEN_FLAGS
SUPPORTS_LIKE_QUANTIFIERS
MATCH_AGAINST_TABLE_PREFIX
SET_ASSIGNMENT_REQUIRES_VARIABLE_KEYWORD
DECLARE_DEFAULT_ASSIGNMENT
UPDATE_STATEMENT_SUPPORTS_FROM
STAR_EXCLUDE_REQUIRES_DERIVED_TABLE
SUPPORTS_DROP_ALTER_ICEBERG_PROPERTY
UNSUPPORTED_TYPES
TIME_PART_SINGULARS
NAMED_PLACEHOLDER_TOKEN
EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
RESERVED_KEYWORDS
WITH_SEPARATED_COMMENTS
EXCLUDE_COMMENTS
UNWRAPPED_INTERVAL_VALUES
PARAMETERIZABLE_TEXT_TYPES
EXPRESSIONS_WITHOUT_NESTED_CTES
SAFE_JSON_PATH_KEY_RE
SENTINEL_LINE_BREAK
pretty
identify
normalize
pad
unsupported_level
max_unsupported
leading_comma
max_text_width
comments
dialect
normalize_functions
unsupported_messages
generate
preprocess
unsupported
sep
seg
sanitize_comment
maybe_comment
wrap
no_identify
normalize_func
indent
sql
uncache_sql
cache_sql
characterset_sql
column_parts
column_sql
pseudocolumn_sql
columnposition_sql
columndef_sql
columnconstraint_sql
computedcolumnconstraint_sql
autoincrementcolumnconstraint_sql
compresscolumnconstraint_sql
generatedasrowcolumnconstraint_sql
periodforsystemtimeconstraint_sql
notnullcolumnconstraint_sql
primarykeycolumnconstraint_sql
uniquecolumnconstraint_sql
inoutcolumnconstraint_sql
create_sql
sequenceproperties_sql
triggerproperties_sql
triggerreferencing_sql
triggerevent_sql
clone_sql
heredoc_sql
prepend_ctes
with_sql
cte_sql
tablealias_sql
bitstring_sql
hexstring_sql
bytestring_sql
unicodestring_sql
rawstring_sql
datatypeparam_sql
directory_sql
delete_sql
drop_sql
set_operation
set_operations
fetch_sql
limitoptions_sql
filter_sql
hint_sql
indexparameters_sql
index_sql
identifier_sql
hex_sql
lowerhex_sql
inputoutputformat_sql
national_sql
partition_sql
properties_sql
root_properties
properties
locate_properties
property_name
property_sql
uuidproperty_sql
likeproperty_sql
fallbackproperty_sql
journalproperty_sql
freespaceproperty_sql
checksumproperty_sql
mergeblockratioproperty_sql
moduleproperty_sql
datablocksizeproperty_sql
blockcompressionproperty_sql
isolatedloadingproperty_sql
partitionboundspec_sql
partitionedofproperty_sql
lockingproperty_sql
withdataproperty_sql
withsystemversioningproperty_sql
insert_sql
introducer_sql
kill_sql
pseudotype_sql
objectidentifier_sql
onconflict_sql
returning_sql
rowformatdelimitedproperty_sql
withtablehint_sql
indextablehint_sql
historicaldata_sql
table_parts
table_sql
tablefromrows_sql
tablesample_sql
pivot_sql
version_sql
tuple_sql
update_sql
var_sql
into_sql
from_sql
groupingsets_sql
rollup_sql
rollupindex_sql
rollupproperty_sql
cube_sql
group_sql
having_sql
connect_sql
prior_sql
join_sql
lambda_sql
lateral_op
lateral_sql
limit_sql
offset_sql
setitem_sql
set_sql
queryband_sql
pragma_sql
lock_sql
literal_sql
escape_str
loaddata_sql
null_sql
boolean_sql
booland_sql
boolor_sql
order_sql
withfill_sql
distribute_sql
sort_sql
ordered_sql
matchrecognizemeasure_sql
matchrecognize_sql
query_modifiers
options_modifier
for_modifiers
queryoption_sql
offset_limit_modifiers
after_limit_modifiers
schema_sql
schema_columns_sql
star_sql
parameter_sql
sessionparameter_sql
placeholder_sql
subquery_sql
qualify_sql
prewhere_sql
where_sql
partition_by_sql
windowspec_sql
withingroup_sql
between_sql
bracket_offset_expressions
bracket_sql
all_sql
any_sql
exists_sql
case_sql
constraint_sql
nextvaluefor_sql
extract_sql
trim_sql
convert_concat_args
concat_sql
concatws_sql
check_sql
foreignkey_sql
primarykey_sql
if_sql
matchagainst_sql
jsonkeyvalue_sql
jsonpath_sql
json_path_part
formatjson_sql
formatphrase_sql
jsonarray_sql
jsonarrayagg_sql
jsoncolumndef_sql
jsonschema_sql
jsontable_sql
openjsoncolumndef_sql
openjson_sql
in_sql
in_unnest_op
interval_sql
return_sql
reference_sql
anonymous_sql
paren_sql
neg_sql
not_sql
alias_sql
pivotalias_sql
aliases_sql
atindex_sql
attimezone_sql
fromtimezone_sql
add_sql
and_sql
or_sql
xor_sql
connector_sql
bitwiseand_sql
bitwiseleftshift_sql
bitwisenot_sql
bitwiseor_sql
bitwiserightshift_sql
bitwisexor_sql
collate_sql
command_sql
comment_sql
mergetreettlaction_sql
mergetreettl_sql
transaction_sql
commit_sql
rollback_sql
altercolumn_sql
alterindex_sql
alterdiststyle_sql
altersortkey_sql
alterrename_sql
renamecolumn_sql
alter_sql
altersession_sql
add_column_sql
droppartition_sql
addconstraint_sql
addpartition_sql
distinct_sql
ignorenulls_sql
respectnulls_sql
havingmax_sql
intdiv_sql
dpipe_sql
div_sql
safedivide_sql
overlaps_sql
distance_sql
eq_sql
propertyeq_sql
escape_sql
glob_sql
gt_sql
gte_sql
is_sql
like_sql
ilike_sql
match_sql
similarto_sql
lt_sql
lte_sql
mod_sql
mul_sql
neq_sql
nullsafeeq_sql
nullsafeneq_sql
sub_sql
jsoncast_sql
try_sql
use_sql
binary
ceil_floor
function_fallback_sql
func
format_args
too_wide
format_time
expressions
op_expressions
naked_property
tag_sql
token_sql
userdefinedfunction_sql
joinhint_sql
kwarg_sql
when_sql
whens_sql
merge_sql
tochar_sql
dictproperty_sql
dictrange_sql
dictsubproperty_sql
duplicatekeyproperty_sql
uniquekeyproperty_sql
distributedbyproperty_sql
oncluster_sql
clusteredbyproperty_sql
anyvalue_sql
querytransform_sql
indexconstraintoption_sql
checkcolumnconstraint_sql
indexcolumnconstraint_sql
nvl2_sql
comprehension_sql
columnprefix_sql
opclass_sql
predict_sql
generateembedding_sql
generatetext_sql
generatetable_sql
generatebool_sql
generateint_sql
generatedouble_sql
mltranslate_sql
mlforecast_sql
aiforecast_sql
featuresattime_sql
vectorsearch_sql
forin_sql
refresh_sql
toarray_sql
tsordstotime_sql
tsordstotimestamp_sql
tsordstodatetime_sql
tsordstodate_sql
unixdate_sql
lastday_sql
dateadd_sql
arrayany_sql
partitionrange_sql
truncatetable_sql
convert_sql
copyparameter_sql
credentials_sql
copy_sql
semicolon_sql
datadeletionproperty_sql
maskingpolicycolumnconstraint_sql
gapfill_sql
scope_resolution
scoperesolution_sql
parsejson_sql
rand_sql
changes_sql
pad_sql
summarize_sql
explodinggenerateseries_sql
converttimezone_sql
json_sql
jsonvalue_sql
skipjsoncolumn_sql
conditionalinsert_sql
multitableinserts_sql
oncondition_sql
jsonextractquote_sql
jsonexists_sql
slice_sql
apply_sql
grant_sql
revoke_sql
grantprivilege_sql
grantprincipal_sql
columns_sql
overlay_sql
todouble_sql
string_sql
median_sql
overflowtruncatebehavior_sql
unixseconds_sql
arraysize_sql
attach_sql
detach_sql
attachoption_sql
watermarkcolumnconstraint_sql
encodeproperty_sql
includeproperty_sql
xmlelement_sql
xmlkeyvalueoption_sql
partitionbyrangeproperty_sql
partitionbyrangepropertydynamic_sql
unpivotcolumns_sql
analyzesample_sql
analyzestatistics_sql
analyzehistogram_sql
analyzedelete_sql
analyzelistchainedrows_sql
analyzevalidate_sql
analyze_sql
xmltable_sql
xmlnamespace_sql
export_sql
declare_sql
declareitem_sql
recursivewithsearch_sql
parameterizedagg_sql
anonymousaggfunc_sql
combinedaggfunc_sql
combinedparameterizedagg_sql
install_sql
get_put_sql
translatecharacters_sql
decodecase_sql
semanticview_sql
getextract_sql
datefromunixdate_sql
space_sql
buildproperty_sql
refreshtriggerproperty_sql
directorystage_sql
uuid_sql
initcap_sql
localtime_sql
localtimestamp_sql
weekstart_sql
chr_sql
block_sql
storedprocedure_sql
ifblock_sql
whileblock_sql
execute_sql
executesql_sql
altermodifysqlsecurity_sql
usingproperty_sql
renameindex_sql