Edit on GitHub

sqlglot.generators.snowflake

   1from __future__ import annotations
   2
   3import typing as t
   4
   5from sqlglot import exp, generator, transforms
   6from sqlglot.dialects.dialect import (
   7    array_append_sql,
   8    array_concat_sql,
   9    date_delta_sql,
  10    datestrtodate_sql,
  11    groupconcat_sql,
  12    if_sql,
  13    inline_array_sql,
  14    map_date_part,
  15    max_or_greatest,
  16    min_or_least,
  17    no_make_interval_sql,
  18    no_timestamp_sql,
  19    rename_func,
  20    strposition_sql,
  21    timestampdiff_sql,
  22    timestamptrunc_sql,
  23    timestrtotime_sql,
  24    unit_to_str,
  25    var_map_sql,
  26)
  27from sqlglot.generator import unsupported_args
  28from sqlglot.helper import find_new_name, flatten, seq_get
  29from sqlglot.optimizer.scope import build_scope, find_all_in_scope
  30from sqlglot.parsers.snowflake import (
  31    RANKING_WINDOW_FUNCTIONS_WITH_FRAME,
  32    TIMESTAMP_TYPES,
  33    SnowflakeParser,
  34    build_object_construct,
  35)
  36from sqlglot.tokens import TokenType
  37from collections import defaultdict
  38
  39if t.TYPE_CHECKING:
  40    from sqlglot._typing import E
  41
  42
  43def _build_datediff(args: list) -> exp.DateDiff:
  44    return exp.DateDiff(
  45        this=seq_get(args, 2),
  46        expression=seq_get(args, 1),
  47        unit=map_date_part(seq_get(args, 0)),
  48        date_part_boundary=True,
  49    )
  50
  51
  52def _build_date_time_add(expr_type: type[E]) -> t.Callable[[list], E]:
  53    def _builder(args: list) -> E:
  54        return expr_type(
  55            this=seq_get(args, 2),
  56            expression=seq_get(args, 1),
  57            unit=map_date_part(seq_get(args, 0)),
  58        )
  59
  60    return _builder
  61
  62
  63def _regexpilike_sql(self: SnowflakeGenerator, expression: exp.RegexpILike) -> str:
  64    flag = expression.text("flag")
  65
  66    if "i" not in flag:
  67        flag += "i"
  68
  69    return self.func(
  70        "REGEXP_LIKE", expression.this, expression.expression, exp.Literal.string(flag)
  71    )
  72
  73
  74def _unqualify_pivot_columns(expression: exp.Expr) -> exp.Expr:
  75    """
  76    Snowflake doesn't allow columns referenced in UNPIVOT to be qualified,
  77    so we need to unqualify them. Same goes for ANY ORDER BY <column>.
  78
  79    Example:
  80        >>> from sqlglot import parse_one
  81        >>> expr = parse_one("SELECT * FROM m_sales UNPIVOT(sales FOR month IN (m_sales.jan, feb, mar, april))")
  82        >>> print(_unqualify_pivot_columns(expr).sql(dialect="snowflake"))
  83        SELECT * FROM m_sales UNPIVOT(sales FOR month IN (jan, feb, mar, april))
  84    """
  85    if isinstance(expression, exp.Pivot):
  86        if expression.unpivot:
  87            expression = transforms.unqualify_columns(expression)
  88        else:
  89            for field in expression.fields:
  90                field_expr = seq_get(field.expressions if field else [], 0)
  91
  92                if isinstance(field_expr, exp.PivotAny):
  93                    unqualified_field_expr = transforms.unqualify_columns(field_expr)
  94                    t.cast(exp.Expr, field).set("expressions", unqualified_field_expr, 0)
  95
  96    return expression
  97
  98
  99def _flatten_structured_types_unless_iceberg(expression: exp.Expr) -> exp.Expr:
 100    assert isinstance(expression, exp.Create)
 101
 102    def _flatten_structured_type(expression: exp.Expr) -> exp.Expr:
 103        if isinstance(expression, exp.DataType) and expression.this in exp.DataType.NESTED_TYPES:
 104            expression.set("expressions", None)
 105        return expression
 106
 107    props = expression.args.get("properties")
 108    if isinstance(expression.this, exp.Schema) and not (props and props.find(exp.IcebergProperty)):
 109        for schema_expression in expression.this.expressions:
 110            if isinstance(schema_expression, exp.ColumnDef):
 111                column_type = schema_expression.kind
 112                if isinstance(column_type, exp.DataType):
 113                    column_type.transform(_flatten_structured_type, copy=False)
 114
 115    return expression
 116
 117
def _unnest_generate_date_array(unnest: exp.Unnest) -> None:
    """Rewrite UNNEST(GENERATE_DATE_ARRAY(...)) in place for Snowflake.

    Snowflake has no GENERATE_DATE_ARRAY, so the date range is emulated with
    ARRAY_GENERATE_RANGE to produce an integer sequence and DATEADD to project
    each integer onto a date. Mutates `unnest` (and possibly its ancestors).
    """
    generate_date_array = unnest.expressions[0]
    start = generate_date_array.args.get("start")
    end = generate_date_array.args.get("end")
    step = generate_date_array.args.get("step")

    # Only fully-specified ranges with a literal step of 1 <unit> are handled
    if not start or not end or not isinstance(step, exp.Interval) or step.name != "1":
        return

    unit = step.args.get("unit")

    unnest_alias = unnest.args.get("alias")
    if unnest_alias:
        unnest_alias = unnest_alias.copy()
        sequence_value_name = seq_get(unnest_alias.columns, 0) or "value"
    else:
        sequence_value_name = "value"

    # We'll add the next sequence value to the starting date and project the result
    date_add = _build_date_time_add(exp.DateAdd)(
        [unit, exp.cast(sequence_value_name, "int"), exp.cast(start, "date")]
    )

    # We use DATEDIFF to compute the number of sequence values needed
    number_sequence = SnowflakeParser.FUNCTIONS["ARRAY_GENERATE_RANGE"](
        [exp.Literal.number(0), _build_datediff([unit, start, end]) + 1]
    )

    unnest.set("expressions", [number_sequence])

    unnest_parent = unnest.parent
    if isinstance(unnest_parent, exp.Join):
        select = unnest_parent.parent
        if isinstance(select, exp.Select):
            # `sequence_value_name` may be a plain string or an Identifier node
            replace_column_name = (
                sequence_value_name
                if isinstance(sequence_value_name, str)
                else sequence_value_name.name
            )

            # Swap references to the sequence column for the DATEADD projection;
            # projections keep the original name via an alias
            scope = build_scope(select)
            if scope:
                for column in scope.columns:
                    if column.name.lower() == replace_column_name.lower():
                        column.replace(
                            date_add.as_(replace_column_name)
                            if isinstance(column.parent, exp.Select)
                            else date_add
                        )

            # A join against the unnested sequence becomes a LATERAL join
            lateral = exp.Lateral(this=unnest_parent.this.pop())
            unnest_parent.replace(exp.Join(this=lateral))
    else:
        # Wrap the rewritten UNNEST in a subquery that projects the dates,
        # preserving the original alias on the subquery
        unnest.replace(
            exp.select(date_add.as_(sequence_value_name))
            .from_(unnest.copy())
            .subquery(unnest_alias)
        )
 176
 177
 178def _transform_generate_date_array(expression: exp.Expr) -> exp.Expr:
 179    if isinstance(expression, exp.Select):
 180        for generate_date_array in expression.find_all(exp.GenerateDateArray):
 181            parent = generate_date_array.parent
 182
 183            # If GENERATE_DATE_ARRAY is used directly as an array (e.g passed into ARRAY_LENGTH), the transformed Snowflake
 184            # query is the following (it'll be unnested properly on the next iteration due to copy):
 185            # SELECT ref(GENERATE_DATE_ARRAY(...)) -> SELECT ref((SELECT ARRAY_AGG(*) FROM UNNEST(GENERATE_DATE_ARRAY(...))))
 186            if not isinstance(parent, exp.Unnest):
 187                unnest = exp.Unnest(expressions=[generate_date_array.copy()])
 188                generate_date_array.replace(
 189                    exp.select(exp.ArrayAgg(this=exp.Star())).from_(unnest).subquery()
 190                )
 191
 192            if (
 193                isinstance(parent, exp.Unnest)
 194                and isinstance(parent.parent, (exp.From, exp.Join))
 195                and len(parent.expressions) == 1
 196            ):
 197                _unnest_generate_date_array(parent)
 198
 199    return expression
 200
 201
 202def _regexpextract_sql(
 203    self: SnowflakeGenerator, expression: exp.RegexpExtract | exp.RegexpExtractAll
 204) -> str:
 205    # Other dialects don't support all of the following parameters, so we need to
 206    # generate default values as necessary to ensure the transpilation is correct
 207    group = expression.args.get("group")
 208
 209    # To avoid generating all these default values, we set group to None if
 210    # it's 0 (also default value) which doesn't trigger the following chain
 211    if group and group.name == "0":
 212        group = None
 213
 214    parameters = expression.args.get("parameters") or (group and exp.Literal.string("c"))
 215    occurrence = expression.args.get("occurrence") or (parameters and exp.Literal.number(1))
 216    position = expression.args.get("position") or (occurrence and exp.Literal.number(1))
 217
 218    return self.func(
 219        "REGEXP_SUBSTR" if isinstance(expression, exp.RegexpExtract) else "REGEXP_SUBSTR_ALL",
 220        expression.this,
 221        expression.expression,
 222        position,
 223        occurrence,
 224        parameters,
 225        group,
 226    )
 227
 228
 229def _json_extract_value_array_sql(
 230    self: SnowflakeGenerator, expression: exp.JSONValueArray | exp.JSONExtractArray
 231) -> str:
 232    json_extract = exp.JSONExtract(this=expression.this, expression=expression.expression)
 233    ident = exp.to_identifier("x")
 234
 235    if isinstance(expression, exp.JSONValueArray):
 236        this: exp.Expr = exp.cast(ident, to=exp.DType.VARCHAR)
 237    else:
 238        this = exp.ParseJSON(this=f"TO_JSON({ident})")
 239
 240    transform_lambda = exp.Lambda(expressions=[ident], this=this)
 241
 242    return self.func("TRANSFORM", json_extract, transform_lambda)
 243
 244
def _qualify_unnested_columns(expression: exp.Expr) -> exp.Expr:
    """Qualify unqualified columns that refer to values produced by UNNEST sources.

    Column names are inferred by peeking into statically-known unnested arrays of
    structs; unaliased UNNESTs get a generated alias. If an alias identifier
    can't be resolved, the expression is returned unmodified (from a copy taken
    before any mutation).
    """
    if isinstance(expression, exp.Select):
        scope = build_scope(expression)
        if not scope:
            return expression

        unnests = list(scope.find_all(exp.Unnest))

        if not unnests:
            return expression

        taken_source_names = set(scope.sources)
        # Maps a lowercased column name to the alias of the unnest producing it
        column_source: dict[str, exp.Identifier] = {}
        unnest_to_identifier: dict[exp.Unnest, exp.Identifier] = {}

        unnest_identifier: exp.Identifier | None = None
        # Kept pristine so we can bail out after partial mutation
        orig_expression = expression.copy()

        for unnest in unnests:
            # Only unnests acting as FROM/JOIN sources introduce columns
            if not isinstance(unnest.parent, (exp.From, exp.Join)):
                continue

            # Try to infer column names produced by an unnest operator. This is only possible
            # when we can peek into the (statically known) contents of the unnested value.
            unnest_columns: set[str] = set()
            for unnest_expr in unnest.expressions:
                if not isinstance(unnest_expr, exp.Array):
                    continue

                for array_expr in unnest_expr.expressions:
                    if not (
                        isinstance(array_expr, exp.Struct)
                        and array_expr.expressions
                        and all(
                            isinstance(struct_expr, exp.PropertyEQ)
                            for struct_expr in array_expr.expressions
                        )
                    ):
                        continue

                    unnest_columns.update(
                        struct_expr.this.name.lower() for struct_expr in array_expr.expressions
                    )
                    break

                if unnest_columns:
                    break

            unnest_alias = unnest.args.get("alias")
            if not unnest_alias:
                alias_name = find_new_name(taken_source_names, "value")
                taken_source_names.add(alias_name)

                # Produce a `TableAlias` AST similar to what is produced for BigQuery. This
                # will be corrected later, when we generate SQL for the `Unnest` AST node.
                aliased_unnest = exp.alias_(unnest, None, table=[alias_name])
                scope.replace(unnest, aliased_unnest)

                unnest_identifier = aliased_unnest.args["alias"].columns[0]
            else:
                alias_columns = getattr(unnest_alias, "columns", [])
                unnest_identifier = unnest_alias.this or seq_get(alias_columns, 0)

            # Without a usable identifier we can't qualify safely; undo everything
            if not isinstance(unnest_identifier, exp.Identifier):
                return orig_expression

            unnest_to_identifier[unnest] = unnest_identifier
            column_source.update({c.lower(): unnest_identifier for c in unnest_columns})

        for column in scope.columns:
            # Already-qualified columns are left untouched
            if column.table:
                continue

            table = column_source.get(column.name.lower())
            if (
                unnest_identifier
                and not table
                and len(scope.sources) == 1
                and column.name.lower() != unnest_identifier.name.lower()
            ):
                # Columns referenced inside the unnest that already resolve to its
                # own alias must not be re-qualified with themselves
                unnest_ancestor = column.find_ancestor(exp.Unnest, exp.Select)
                if isinstance(unnest_ancestor, exp.Unnest):
                    ancestor_identifier = unnest_to_identifier.get(unnest_ancestor)
                    if (
                        ancestor_identifier
                        and ancestor_identifier.name.lower() == unnest_identifier.name.lower()
                    ):
                        continue

                table = unnest_identifier

            column.set("table", table and table.copy())

    return expression
 339
 340
 341def _eliminate_dot_variant_lookup(expression: exp.Expr) -> exp.Expr:
 342    if isinstance(expression, exp.Select):
 343        # This transformation is used to facilitate transpilation of BigQuery `UNNEST` operations
 344        # to Snowflake. It should not affect roundtrip because `Unnest` nodes cannot be produced
 345        # by Snowflake's parser.
 346        #
 347        # Additionally, at the time of writing this, BigQuery is the only dialect that produces a
 348        # `TableAlias` node that only fills `columns` and not `this`, due to `UNNEST_COLUMN_ONLY`.
 349        unnest_aliases = set()
 350        for unnest in find_all_in_scope(expression, exp.Unnest):
 351            unnest_alias = unnest.args.get("alias")
 352            if (
 353                isinstance(unnest_alias, exp.TableAlias)
 354                and not unnest_alias.this
 355                and len(unnest_alias.columns) == 1
 356            ):
 357                unnest_aliases.add(unnest_alias.columns[0].name)
 358
 359        if unnest_aliases:
 360            for c in find_all_in_scope(expression, exp.Column):
 361                if c.table in unnest_aliases:
 362                    bracket_lhs = c.args["table"]
 363                    bracket_rhs = exp.Literal.string(c.name)
 364                    bracket = exp.Bracket(this=bracket_lhs, expressions=[bracket_rhs])
 365
 366                    if c.parent is expression:
 367                        # Retain column projection names by using aliases
 368                        c.replace(exp.alias_(bracket, c.this.copy()))
 369                    else:
 370                        c.replace(bracket)
 371
 372    return expression
 373
 374
class SnowflakeGenerator(generator.Generator):
    """Generates Snowflake SQL from sqlglot ASTs.

    Overrides the base generator's feature flags and per-node TRANSFORMS to
    match Snowflake's syntax and built-in function names.
    """

    # Dialect capability/behavior flags consumed by the base generator
    SELECT_KINDS: tuple[str, ...] = ()
    PARAMETER_TOKEN = "$"
    MATCHED_BY_SOURCE = False
    SINGLE_STRING_INTERVAL = True
    JOIN_HINTS = False
    TABLE_HINTS = False
    QUERY_HINTS = False
    AGGREGATE_FILTER_SUPPORTED = False
    SUPPORTS_TABLE_COPY = False
    COLLATE_IS_FUNC = True
    LIMIT_ONLY_LITERALS = True
    JSON_KEY_VALUE_PAIR_SEP = ","
    INSERT_OVERWRITE = " OVERWRITE INTO"
    STRUCT_DELIMITER = ("(", ")")
    COPY_PARAMS_ARE_WRAPPED = False
    COPY_PARAMS_EQ_REQUIRED = True
    STAR_EXCEPT = "EXCLUDE"
    SUPPORTS_EXPLODING_PROJECTIONS = False
    ARRAY_CONCAT_IS_VAR_LEN = False
    SUPPORTS_CONVERT_TIMEZONE = True
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
    SUPPORTS_MEDIAN = True
    ARRAY_SIZE_NAME = "ARRAY_SIZE"
    SUPPORTS_DECODE_CASE = True

    AFTER_HAVING_MODIFIER_TRANSFORMS = generator.AFTER_HAVING_MODIFIER_TRANSFORMS

    IS_BOOL_ALLOWED = False
    DIRECTED_JOINS = True
    SUPPORTS_UESCAPE = False
    TRY_SUPPORTED = False

    # SQL generation overrides keyed by AST node type; values are either
    # callables (self, node) -> str or preprocess/rename helpers
    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
        exp.ArgMax: rename_func("MAX_BY"),
        exp.ArgMin: rename_func("MIN_BY"),
        exp.Array: transforms.preprocess([transforms.inherit_struct_field_names]),
        exp.ArrayConcat: array_concat_sql("ARRAY_CAT"),
        exp.ArrayAppend: array_append_sql("ARRAY_APPEND"),
        exp.ArrayPrepend: array_append_sql("ARRAY_PREPEND"),
        # ARRAY_CONTAINS expects a VARIANT needle, so cast unless explicitly disabled
        exp.ArrayContains: lambda self, e: self.func(
            "ARRAY_CONTAINS",
            e.expression
            if e.args.get("ensure_variant") is False
            else exp.cast(e.expression, exp.DType.VARIANT, copy=False),
            e.this,
        ),
        exp.ArrayPosition: lambda self, e: self.func(
            "ARRAY_POSITION",
            e.expression,
            e.this,
        ),
        exp.ArrayIntersect: rename_func("ARRAY_INTERSECTION"),
        exp.ArrayOverlaps: rename_func("ARRAYS_OVERLAP"),
        exp.AtTimeZone: lambda self, e: self.func("CONVERT_TIMEZONE", e.args.get("zone"), e.this),
        exp.BitwiseOr: rename_func("BITOR"),
        exp.BitwiseXor: rename_func("BITXOR"),
        exp.BitwiseAnd: rename_func("BITAND"),
        exp.BitwiseAndAgg: rename_func("BITANDAGG"),
        exp.BitwiseOrAgg: rename_func("BITORAGG"),
        exp.BitwiseXorAgg: rename_func("BITXORAGG"),
        exp.BitwiseNot: rename_func("BITNOT"),
        exp.BitwiseLeftShift: rename_func("BITSHIFTLEFT"),
        exp.BitwiseRightShift: rename_func("BITSHIFTRIGHT"),
        exp.Create: transforms.preprocess([_flatten_structured_types_unless_iceberg]),
        exp.CurrentTimestamp: lambda self, e: (
            self.func("SYSDATE") if e.args.get("sysdate") else self.function_fallback_sql(e)
        ),
        exp.CurrentSchemas: lambda self, e: self.func("CURRENT_SCHEMAS"),
        exp.Localtime: lambda self, e: (
            self.func("CURRENT_TIME", e.this) if e.this else "CURRENT_TIME"
        ),
        exp.Localtimestamp: lambda self, e: (
            self.func("CURRENT_TIMESTAMP", e.this) if e.this else "CURRENT_TIMESTAMP"
        ),
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.DateDiff: date_delta_sql("DATEDIFF"),
        exp.DatetimeAdd: date_delta_sql("TIMESTAMPADD"),
        exp.DatetimeDiff: timestampdiff_sql,
        exp.DateStrToDate: datestrtodate_sql,
        exp.Decrypt: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}DECRYPT",
            e.this,
            e.args.get("passphrase"),
            e.args.get("aad"),
            e.args.get("encryption_method"),
        ),
        exp.DecryptRaw: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}DECRYPT_RAW",
            e.this,
            e.args.get("key"),
            e.args.get("iv"),
            e.args.get("aad"),
            e.args.get("encryption_method"),
            e.args.get("aead"),
        ),
        exp.DayOfMonth: rename_func("DAYOFMONTH"),
        exp.DayOfWeek: rename_func("DAYOFWEEK"),
        exp.DayOfWeekIso: rename_func("DAYOFWEEKISO"),
        exp.DayOfYear: rename_func("DAYOFYEAR"),
        exp.DotProduct: rename_func("VECTOR_INNER_PRODUCT"),
        exp.Explode: rename_func("FLATTEN"),
        exp.Extract: lambda self, e: self.func(
            "DATE_PART", map_date_part(e.this, self.dialect), e.expression
        ),
        exp.CosineDistance: rename_func("VECTOR_COSINE_SIMILARITY"),
        exp.EuclideanDistance: rename_func("VECTOR_L2_DISTANCE"),
        exp.HandlerProperty: lambda self, e: f"HANDLER = {self.sql(e, 'this')}",
        exp.FileFormatProperty: lambda self, e: (
            f"FILE_FORMAT=({self.expressions(e, 'expressions', sep=' ')})"
        ),
        exp.FromTimeZone: lambda self, e: self.func(
            "CONVERT_TIMEZONE", e.args.get("zone"), "'UTC'", e.this
        ),
        exp.GenerateSeries: lambda self, e: self.func(
            "ARRAY_GENERATE_RANGE",
            e.args["start"],
            # ARRAY_GENERATE_RANGE's end is exclusive, so an inclusive end needs +1
            e.args["end"] if e.args.get("is_end_exclusive") else e.args["end"] + 1,
            e.args.get("step"),
        ),
        exp.GetExtract: rename_func("GET"),
        exp.GroupConcat: lambda self, e: groupconcat_sql(self, e, sep=""),
        exp.If: if_sql(name="IFF", false_value="NULL"),
        exp.JSONArray: lambda self, e: self.func(
            "TO_VARIANT", self.func("ARRAY_CONSTRUCT", *e.expressions)
        ),
        exp.JSONExtractArray: _json_extract_value_array_sql,
        exp.JSONExtractScalar: lambda self, e: self.func(
            "JSON_EXTRACT_PATH_TEXT", e.this, e.expression
        ),
        exp.JSONKeys: rename_func("OBJECT_KEYS"),
        exp.JSONObject: lambda self, e: self.func("OBJECT_CONSTRUCT_KEEP_NULL", *e.expressions),
        exp.JSONPathRoot: lambda *_: "",
        exp.JSONValueArray: _json_extract_value_array_sql,
        exp.Levenshtein: unsupported_args("ins_cost", "del_cost", "sub_cost")(
            rename_func("EDITDISTANCE")
        ),
        exp.LocationProperty: lambda self, e: f"LOCATION={self.sql(e, 'this')}",
        exp.LogicalAnd: rename_func("BOOLAND_AGG"),
        exp.LogicalOr: rename_func("BOOLOR_AGG"),
        exp.Map: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
        exp.ManhattanDistance: rename_func("VECTOR_L1_DISTANCE"),
        exp.MakeInterval: no_make_interval_sql,
        exp.Max: max_or_greatest,
        exp.Min: min_or_least,
        exp.ParseJSON: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}PARSE_JSON", e.this
        ),
        exp.ToBinary: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_BINARY", e.this, e.args.get("format")
        ),
        exp.ToBoolean: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_BOOLEAN", e.this
        ),
        exp.ToDouble: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_DOUBLE", e.this, e.args.get("format")
        ),
        exp.ToFile: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_FILE", e.this, e.args.get("path")
        ),
        exp.JSONFormat: rename_func("TO_JSON"),
        exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
        exp.PercentileCont: transforms.preprocess([transforms.add_within_group_for_percentiles]),
        exp.PercentileDisc: transforms.preprocess([transforms.add_within_group_for_percentiles]),
        exp.Pivot: transforms.preprocess([_unqualify_pivot_columns]),
        exp.RegexpExtract: _regexpextract_sql,
        exp.RegexpExtractAll: _regexpextract_sql,
        exp.RegexpILike: _regexpilike_sql,
        exp.RowAccessProperty: lambda self, e: self.rowaccessproperty_sql(e),
        # SELECT rewrites run in order; later steps rely on earlier normalizations
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_window_clause,
                transforms.eliminate_distinct_on,
                transforms.explode_projection_to_unnest(),
                transforms.eliminate_semi_and_anti_joins,
                _transform_generate_date_array,
                _qualify_unnested_columns,
                _eliminate_dot_variant_lookup,
            ]
        ),
        exp.SHA: rename_func("SHA1"),
        exp.SHA1Digest: rename_func("SHA1_BINARY"),
        exp.MD5Digest: rename_func("MD5_BINARY"),
        exp.MD5NumberLower64: rename_func("MD5_NUMBER_LOWER64"),
        exp.MD5NumberUpper64: rename_func("MD5_NUMBER_UPPER64"),
        exp.Hex: rename_func("HEX_ENCODE"),
        exp.LowerHex: rename_func("TO_CHAR"),
        exp.Skewness: rename_func("SKEW"),
        exp.StarMap: rename_func("OBJECT_CONSTRUCT"),
        exp.StartsWith: rename_func("STARTSWITH"),
        exp.EndsWith: rename_func("ENDSWITH"),
        exp.Rand: lambda self, e: self.func("RANDOM", e.this),
        exp.StrPosition: lambda self, e: strposition_sql(
            self, e, func_name="CHARINDEX", supports_position=True
        ),
        exp.StrToDate: lambda self, e: self.func("DATE", e.this, self.format_time(e)),
        exp.StringToArray: rename_func("STRTOK_TO_ARRAY"),
        exp.StrtokToArray: rename_func("STRTOK_TO_ARRAY"),
        exp.Stuff: rename_func("INSERT"),
        exp.StPoint: rename_func("ST_MAKEPOINT"),
        exp.TimeAdd: date_delta_sql("TIMEADD"),
        exp.TimeSlice: lambda self, e: self.func(
            "TIME_SLICE",
            e.this,
            e.expression,
            unit_to_str(e),
            e.args.get("kind"),
        ),
        exp.Timestamp: no_timestamp_sql,
        exp.TimestampAdd: date_delta_sql("TIMESTAMPADD"),
        exp.TimestampDiff: lambda self, e: self.func("TIMESTAMPDIFF", e.unit, e.expression, e.this),
        exp.TimestampTrunc: timestamptrunc_sql(),
        exp.TimeStrToTime: timestrtotime_sql,
        exp.TimeToUnix: lambda self, e: f"EXTRACT(epoch_second FROM {self.sql(e, 'this')})",
        exp.ToArray: rename_func("TO_ARRAY"),
        exp.ToChar: lambda self, e: self.function_fallback_sql(e),
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        exp.TsOrDsToDate: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_DATE", e.this, self.format_time(e)
        ),
        exp.TsOrDsToTime: lambda self, e: self.func(
            f"{'TRY_' if e.args.get('safe') else ''}TO_TIME", e.this, self.format_time(e)
        ),
        exp.Unhex: rename_func("HEX_DECODE_BINARY"),
        exp.UnixToTime: lambda self, e: self.func("TO_TIMESTAMP", e.this, e.args.get("scale")),
        exp.Uuid: rename_func("UUID_STRING"),
        exp.VarMap: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
        exp.Booland: rename_func("BOOLAND"),
        exp.Boolor: rename_func("BOOLOR"),
        exp.WeekOfYear: rename_func("WEEKISO"),
        exp.YearOfWeek: rename_func("YEAROFWEEK"),
        exp.YearOfWeekIso: rename_func("YEAROFWEEKISO"),
        exp.Xor: rename_func("BOOLXOR"),
        exp.ByteLength: rename_func("OCTET_LENGTH"),
        exp.Flatten: rename_func("ARRAY_FLATTEN"),
        exp.ArrayConcatAgg: lambda self, e: self.func("ARRAY_FLATTEN", exp.ArrayAgg(this=e.this)),
        exp.SHA2Digest: lambda self, e: self.func(
            "SHA2_BINARY", e.this, e.args.get("length") or exp.Literal.number(256)
        ),
    }
 618
 619    def sortarray_sql(self, expression: exp.SortArray) -> str:
 620        asc = expression.args.get("asc")
 621        nulls_first = expression.args.get("nulls_first")
 622        if asc == exp.false() and nulls_first == exp.true():
 623            nulls_first = None
 624        return self.func("ARRAY_SORT", expression.this, asc, nulls_first)
 625
 626    def nthvalue_sql(self, expression: exp.NthValue) -> str:
 627        result = self.func("NTH_VALUE", expression.this, expression.args.get("offset"))
 628
 629        from_first = expression.args.get("from_first")
 630
 631        if from_first is not None:
 632            if from_first:
 633                result = result + " FROM FIRST"
 634            else:
 635                result = result + " FROM LAST"
 636
 637        return result
 638
    # JSON path components Snowflake's colon/bracket syntax can express
    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    # Data type renames on top of the base generator's mapping
    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DType.BIGDECIMAL: "DOUBLE",
        exp.DType.JSON: "VARIANT",
        exp.DType.NESTED: "OBJECT",
        exp.DType.STRUCT: "OBJECT",
        exp.DType.TEXT: "VARCHAR",
    }

    TOKEN_MAPPING = {
        TokenType.AUTO_INCREMENT: "AUTOINCREMENT",
    }

    # Where each property is emitted within CREATE statements
    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.CredentialsProperty: exp.Properties.Location.POST_WITH,
        exp.LocationProperty: exp.Properties.Location.POST_WITH,
        exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
        exp.RowAccessProperty: exp.Properties.Location.POST_SCHEMA,
        exp.SetProperty: exp.Properties.Location.UNSUPPORTED,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    # Expressions that cannot appear inside a VALUES table (see values_sql)
    UNSUPPORTED_VALUES_EXPRESSIONS = {
        exp.Map,
        exp.StarMap,
        exp.Struct,
        exp.VarMap,
    }

    RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS = (exp.ArrayAgg,)
 676
 677    def with_properties(self, properties: exp.Properties) -> str:
 678        return self.properties(properties, wrapped=False, prefix=self.sep(""), sep=" ")
 679
 680    def values_sql(self, expression: exp.Values, values_as_table: bool = True) -> str:
 681        if expression.find(*self.UNSUPPORTED_VALUES_EXPRESSIONS):
 682            values_as_table = False
 683
 684        return super().values_sql(expression, values_as_table=values_as_table)
 685
    def datatype_sql(self, expression: exp.DataType) -> str:
        """Render a data type, special-casing VECTOR element types and structured OBJECTs.

        May mutate string-literal struct field names into quoted identifiers.
        """
        # Check if this is a FLOAT type nested inside a VECTOR type
        # VECTOR only accepts FLOAT (not DOUBLE), INT, and STRING as element types
        # https://docs.snowflake.com/en/sql-reference/data-types-vector
        if expression.is_type(exp.DType.DOUBLE):
            parent = expression.parent
            if isinstance(parent, exp.DataType) and parent.is_type(exp.DType.VECTOR):
                # Preserve FLOAT for VECTOR types instead of mapping to synonym DOUBLE
                return "FLOAT"

        expressions = expression.expressions
        if expressions and expression.is_type(*exp.DataType.STRUCT_TYPES):
            for field_type in expressions:
                # The correct syntax is OBJECT [ (<key> <value_type [NOT NULL] [, ...]) ]
                if isinstance(field_type, exp.DataType):
                    # A bare DataType field has no key, so fall back to plain OBJECT
                    return "OBJECT"
                if (
                    isinstance(field_type, exp.ColumnDef)
                    and field_type.this
                    and field_type.this.is_string
                ):
                    # Doing OBJECT('foo' VARCHAR) is invalid snowflake Syntax. Moreover, besides
                    # converting 'foo' into an identifier, we also need to quote it because these
                    # keys are case-sensitive. For example:
                    #
                    # WITH t AS (SELECT OBJECT_CONSTRUCT('x', 'y') AS c) SELECT c:x FROM t -- correct
                    # WITH t AS (SELECT OBJECT_CONSTRUCT('x', 'y') AS c) SELECT c:X FROM t -- incorrect, returns NULL
                    field_type.this.replace(exp.to_identifier(field_type.name, quoted=True))

        return super().datatype_sql(expression)
 716
 717    def tonumber_sql(self, expression: exp.ToNumber) -> str:
 718        precision = expression.args.get("precision")
 719        scale = expression.args.get("scale")
 720
 721        default_precision = isinstance(precision, exp.Literal) and precision.name == "38"
 722        default_scale = isinstance(scale, exp.Literal) and scale.name == "0"
 723
 724        if default_precision and default_scale:
 725            precision = None
 726            scale = None
 727        elif default_scale:
 728            scale = None
 729
 730        func_name = "TRY_TO_NUMBER" if expression.args.get("safe") else "TO_NUMBER"
 731
 732        return self.func(
 733            func_name,
 734            expression.this,
 735            expression.args.get("format"),
 736            precision,
 737            scale,
 738        )
 739
 740    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
 741        milli = expression.args.get("milli")
 742        if milli is not None:
 743            milli_to_nano = milli.pop() * exp.Literal.number(1000000)
 744            expression.set("nano", milli_to_nano)
 745
 746        return rename_func("TIMESTAMP_FROM_PARTS")(self, expression)
 747
 748    def cast_sql(self, expression: exp.Cast, safe_prefix: str | None = None) -> str:
 749        if expression.is_type(exp.DType.GEOGRAPHY):
 750            return self.func("TO_GEOGRAPHY", expression.this)
 751        if expression.is_type(exp.DType.GEOMETRY):
 752            return self.func("TO_GEOMETRY", expression.this)
 753
 754        return super().cast_sql(expression, safe_prefix=safe_prefix)
 755
 756    def trycast_sql(self, expression: exp.TryCast) -> str:
 757        value = expression.this
 758
 759        if value.type is None:
 760            from sqlglot.optimizer.annotate_types import annotate_types
 761
 762            value = annotate_types(value, dialect=self.dialect)
 763
 764        # Snowflake requires that TRY_CAST's value be a string
 765        # If TRY_CAST is being roundtripped (since Snowflake is the only dialect that sets "requires_string") or
 766        # if we can deduce that the value is a string, then we can generate TRY_CAST
 767        if expression.args.get("requires_string") or value.is_type(*exp.DataType.TEXT_TYPES):
 768            return super().trycast_sql(expression)
 769
 770        return self.cast_sql(expression)
 771
 772    def log_sql(self, expression: exp.Log) -> str:
 773        if not expression.expression:
 774            return self.func("LN", expression.this)
 775
 776        return super().log_sql(expression)
 777
 778    def greatest_sql(self, expression: exp.Greatest) -> str:
 779        name = "GREATEST_IGNORE_NULLS" if expression.args.get("ignore_nulls") else "GREATEST"
 780        return self.func(name, expression.this, *expression.expressions)
 781
 782    def least_sql(self, expression: exp.Least) -> str:
 783        name = "LEAST_IGNORE_NULLS" if expression.args.get("ignore_nulls") else "LEAST"
 784        return self.func(name, expression.this, *expression.expressions)
 785
 786    def generator_sql(self, expression: exp.Generator) -> str:
 787        args = []
 788        rowcount = expression.args.get("rowcount")
 789        timelimit = expression.args.get("timelimit")
 790
 791        if rowcount:
 792            args.append(exp.Kwarg(this=exp.var("ROWCOUNT"), expression=rowcount))
 793        if timelimit:
 794            args.append(exp.Kwarg(this=exp.var("TIMELIMIT"), expression=timelimit))
 795
 796        return self.func("GENERATOR", *args)
 797
    def unnest_sql(self, expression: exp.Unnest) -> str:
        """Transpile UNNEST into Snowflake's (TABLE) FLATTEN construct."""
        unnest_alias = expression.args.get("alias")
        offset = expression.args.get("offset")

        # FLATTEN exposes a fixed set of output columns; reuse the first user-provided
        # alias column (if any) for "value" and fill the rest with canonical names.
        unnest_alias_columns = unnest_alias.columns if unnest_alias else []
        value = seq_get(unnest_alias_columns, 0) or exp.to_identifier("value")

        columns = [
            exp.to_identifier("seq"),
            exp.to_identifier("key"),
            exp.to_identifier("path"),
            # An explicit WITH OFFSET alias replaces the default "index" column name
            offset.pop() if isinstance(offset, exp.Expr) else exp.to_identifier("index"),
            value,
            exp.to_identifier("this"),
        ]

        if unnest_alias:
            unnest_alias.set("columns", columns)
        else:
            unnest_alias = exp.TableAlias(this="_u", columns=columns)

        table_input = self.sql(expression.expressions[0])
        if not table_input.startswith("INPUT =>"):
            table_input = f"INPUT => {table_input}"

        expression_parent = expression.parent

        # LATERAL FLATTEN(...) takes no TABLE() wrapper; a standalone FLATTEN does
        explode = (
            f"FLATTEN({table_input})"
            if isinstance(expression_parent, exp.Lateral)
            else f"TABLE(FLATTEN({table_input}))"
        )
        alias = self.sql(unnest_alias)
        alias = f" AS {alias}" if alias else ""
        # Outside of FROM/JOIN/LATERAL contexts, project the value column explicitly
        value = (
            ""
            if isinstance(expression_parent, (exp.From, exp.Join, exp.Lateral))
            else f"{value} FROM "
        )

        return f"{value}{explode}{alias}"
 839
    def show_sql(self, expression: exp.Show) -> str:
        """Generate a Snowflake SHOW command with its optional modifiers."""
        terse = "TERSE " if expression.args.get("terse") else ""
        iceberg = "ICEBERG " if expression.args.get("iceberg") else ""
        history = " HISTORY" if expression.args.get("history") else ""
        like = self.sql(expression, "like")
        like = f" LIKE {like}" if like else ""

        scope = self.sql(expression, "scope")
        scope = f" {scope}" if scope else ""

        scope_kind = self.sql(expression, "scope_kind")
        if scope_kind:
            scope_kind = f" IN {scope_kind}"

        starts_with = self.sql(expression, "starts_with")
        if starts_with:
            starts_with = f" STARTS WITH {starts_with}"

        limit = self.sql(expression, "limit")

        from_ = self.sql(expression, "from_")
        if from_:
            from_ = f" FROM {from_}"

        privileges = self.expressions(expression, key="privileges", flat=True)
        privileges = f" WITH PRIVILEGES {privileges}" if privileges else ""

        # NOTE: scope_kind ("IN <kind>") is emitted before the scope itself
        return f"SHOW {terse}{iceberg}{expression.name}{history}{like}{scope_kind}{scope}{starts_with}{limit}{from_}{privileges}"
 868
 869    def rowaccessproperty_sql(self, expression: exp.RowAccessProperty) -> str:
 870        if not expression.this:
 871            return "ROW ACCESS"
 872        on = f" ON ({self.expressions(expression, flat=True)})" if expression.expressions else ""
 873        return f"WITH ROW ACCESS POLICY {self.sql(expression, 'this')}{on}"
 874
 875    def describe_sql(self, expression: exp.Describe) -> str:
 876        kind_value = expression.args.get("kind") or "TABLE"
 877
 878        properties = expression.args.get("properties")
 879        if properties:
 880            qualifier = self.expressions(properties, sep=" ")
 881            kind = f" {qualifier} {kind_value}"
 882        else:
 883            kind = f" {kind_value}"
 884
 885        this = f" {self.sql(expression, 'this')}"
 886        expressions = self.expressions(expression, flat=True)
 887        expressions = f" {expressions}" if expressions else ""
 888        return f"DESCRIBE{kind}{this}{expressions}"
 889
 890    def generatedasidentitycolumnconstraint_sql(
 891        self, expression: exp.GeneratedAsIdentityColumnConstraint
 892    ) -> str:
 893        start = expression.args.get("start")
 894        start = f" START {start}" if start else ""
 895        increment = expression.args.get("increment")
 896        increment = f" INCREMENT {increment}" if increment else ""
 897
 898        order = expression.args.get("order")
 899        if order is not None:
 900            order_clause = " ORDER" if order else " NOORDER"
 901        else:
 902            order_clause = ""
 903
 904        return f"AUTOINCREMENT{start}{increment}{order_clause}"
 905
 906    def cluster_sql(self, expression: exp.Cluster) -> str:
 907        return f"CLUSTER BY ({self.expressions(expression, flat=True)})"
 908
    def struct_sql(self, expression: exp.Struct) -> str:
        """Generate an OBJECT_CONSTRUCT call (or a wildcard object literal) for a Struct."""
        if len(expression.expressions) == 1:
            arg = expression.expressions[0]
            if arg.is_star or (isinstance(arg, exp.ILike) and arg.left.is_star):
                # Wildcard syntax: https://docs.snowflake.com/en/sql-reference/data-types-semistructured#object
                return f"{{{self.sql(expression.expressions[0])}}}"

        keys = []
        values = []

        # OBJECT_CONSTRUCT takes alternating key/value arguments; positional
        # (unnamed) struct fields get synthetic "_<i>" keys.
        for i, e in enumerate(expression.expressions):
            if isinstance(e, exp.PropertyEQ):
                keys.append(
                    exp.Literal.string(e.name) if isinstance(e.this, exp.Identifier) else e.this
                )
                values.append(e.expression)
            else:
                keys.append(exp.Literal.string(f"_{i}"))
                values.append(e)

        return self.func("OBJECT_CONSTRUCT", *flatten(zip(keys, values)))
 930
 931    @unsupported_args("weight", "accuracy")
 932    def approxquantile_sql(self, expression: exp.ApproxQuantile) -> str:
 933        return self.func("APPROX_PERCENTILE", expression.this, expression.args.get("quantile"))
 934
 935    def alterset_sql(self, expression: exp.AlterSet) -> str:
 936        exprs = self.expressions(expression, flat=True)
 937        exprs = f" {exprs}" if exprs else ""
 938        file_format = self.expressions(expression, key="file_format", flat=True, sep=" ")
 939        file_format = f" STAGE_FILE_FORMAT = ({file_format})" if file_format else ""
 940        copy_options = self.expressions(expression, key="copy_options", flat=True, sep=" ")
 941        copy_options = f" STAGE_COPY_OPTIONS = ({copy_options})" if copy_options else ""
 942        tag = self.expressions(expression, key="tag", flat=True)
 943        tag = f" TAG {tag}" if tag else ""
 944
 945        return f"SET{exprs}{file_format}{copy_options}{tag}"
 946
    def strtotime_sql(self, expression: exp.StrToTime) -> str:
        """Generate [TRY_]TO_TIMESTAMP / TO_DATE etc. based on the target timestamp type."""
        # target_type is stored as a DataType instance
        target_type = expression.args.get("target_type")

        # Get the type enum from DataType instance or from type annotation
        if isinstance(target_type, exp.DataType):
            type_enum = target_type.this
        elif expression.type:
            type_enum = expression.type.this
        else:
            type_enum = exp.DType.TIMESTAMP

        # Unknown timestamp types fall back to plain TO_TIMESTAMP
        func_name = TIMESTAMP_TYPES.get(type_enum, "TO_TIMESTAMP")

        # "safe" maps to Snowflake's TRY_-prefixed conversion functions
        return self.func(
            f"{'TRY_' if expression.args.get('safe') else ''}{func_name}",
            expression.this,
            self.format_time(expression),
        )
 966
 967    def timestampsub_sql(self, expression: exp.TimestampSub):
 968        return self.sql(
 969            exp.TimestampAdd(
 970                this=expression.this,
 971                expression=expression.expression * -1,
 972                unit=expression.unit,
 973            )
 974        )
 975
 976    def jsonextract_sql(self, expression: exp.JSONExtract):
 977        this = expression.this
 978
 979        # JSON strings are valid coming from other dialects such as BQ so
 980        # for these cases we PARSE_JSON preemptively
 981        if not isinstance(this, (exp.ParseJSON, exp.JSONExtract)) and not expression.args.get(
 982            "requires_json"
 983        ):
 984            this = exp.ParseJSON(this=this)
 985
 986        return self.func(
 987            "GET_PATH",
 988            this,
 989            expression.expression,
 990        )
 991
 992    def timetostr_sql(self, expression: exp.TimeToStr) -> str:
 993        this = expression.this
 994        if this.is_string:
 995            this = exp.cast(this, exp.DType.TIMESTAMP)
 996
 997        return self.func("TO_CHAR", this, self.format_time(expression))
 998
 999    def datesub_sql(self, expression: exp.DateSub) -> str:
1000        value = expression.expression
1001        if value:
1002            value.replace(value * (-1))
1003        else:
1004            self.unsupported("DateSub cannot be transpiled if the subtracted count is unknown")
1005
1006        return date_delta_sql("DATEADD")(self, expression)
1007
1008    def select_sql(self, expression: exp.Select) -> str:
1009        limit = expression.args.get("limit")
1010        offset = expression.args.get("offset")
1011        if offset and not limit:
1012            expression.limit(exp.Null(), copy=False)
1013        return super().select_sql(expression)
1014
    def createable_sql(self, expression: exp.Create, locations: defaultdict) -> str:
        """Render the createable portion of CREATE, special-casing materialized views.

        Mutates ``locations`` in place when the COPY GRANTS property is moved.
        """
        is_materialized = expression.find(exp.MaterializedProperty)
        copy_grants_property = expression.find(exp.CopyGrantsProperty)

        if expression.kind == "VIEW" and is_materialized and copy_grants_property:
            # For materialized views, COPY GRANTS is located *before* the columns list
            # This is in contrast to normal views where COPY GRANTS is located *after* the columns list
            # We default CopyGrantsProperty to POST_SCHEMA which means we need to output it POST_NAME if a materialized view is detected
            # ref: https://docs.snowflake.com/en/sql-reference/sql/create-materialized-view#syntax
            # ref: https://docs.snowflake.com/en/sql-reference/sql/create-view#syntax
            post_schema_properties = locations[exp.Properties.Location.POST_SCHEMA]
            post_schema_properties.pop(post_schema_properties.index(copy_grants_property))

            this_name = self.sql(expression.this, "this")
            copy_grants = self.sql(copy_grants_property)
            this_schema = self.schema_columns_sql(expression.this)
            this_schema = f"{self.sep()}{this_schema}" if this_schema else ""

            return f"{this_name}{self.sep()}{copy_grants}{this_schema}"

        return super().createable_sql(expression, locations)
1036
1037    def arrayagg_sql(self, expression: exp.ArrayAgg) -> str:
1038        this = expression.this
1039
1040        # If an ORDER BY clause is present, we need to remove it from ARRAY_AGG
1041        # and add it later as part of the WITHIN GROUP clause
1042        order = this if isinstance(this, exp.Order) else None
1043        if order:
1044            expression.set("this", order.this.pop())
1045
1046        expr_sql = super().arrayagg_sql(expression)
1047
1048        if order:
1049            expr_sql = self.sql(exp.WithinGroup(this=expr_sql, expression=order))
1050
1051        return expr_sql
1052
1053    def arraydistinct_sql(self, expression: exp.ArrayDistinct) -> str:
1054        if expression.args.get("check_null"):
1055            return self.func("ARRAY_DISTINCT", expression.this)
1056        return self.func("ARRAY_DISTINCT", exp.ArrayCompact(this=expression.this))
1057
1058    def arraytostring_sql(self, expression: exp.ArrayToString) -> str:
1059        return self.func("ARRAY_TO_STRING", expression.this, expression.expression)
1060
    def array_sql(self, expression: exp.Array) -> str:
        """Generate an array literal, rewriting ARRAY(SELECT AS STRUCT ...) subqueries."""
        expressions = expression.expressions

        first_expr = seq_get(expressions, 0)
        if isinstance(first_expr, exp.Select):
            # SELECT AS STRUCT foo AS alias_foo -> ARRAY_AGG(OBJECT_CONSTRUCT('alias_foo', foo))
            if first_expr.text("kind").upper() == "STRUCT":
                object_construct_args = []
                for expr in first_expr.expressions:
                    # Alias case: SELECT AS STRUCT foo AS alias_foo -> OBJECT_CONSTRUCT('alias_foo', foo)
                    # Column case: SELECT AS STRUCT foo -> OBJECT_CONSTRUCT('foo', foo)
                    name = expr.this if isinstance(expr, exp.Alias) else expr

                    object_construct_args.extend([exp.Literal.string(expr.alias_or_name), name])

                array_agg = exp.ArrayAgg(this=build_object_construct(args=object_construct_args))

                # Rewrite the SELECT in place: drop the STRUCT kind and aggregate
                first_expr.set("kind", None)
                first_expr.set("expressions", [array_agg])

                return self.sql(first_expr.subquery())

        return inline_array_sql(self, expression)
1084
1085    def currentdate_sql(self, expression: exp.CurrentDate) -> str:
1086        zone = self.sql(expression, "this")
1087        if not zone:
1088            return super().currentdate_sql(expression)
1089
1090        expr = exp.Cast(
1091            this=exp.ConvertTimezone(target_tz=zone, timestamp=exp.CurrentTimestamp()),
1092            to=exp.DataType(this=exp.DType.DATE),
1093        )
1094        return self.sql(expr)
1095
1096    def dot_sql(self, expression: exp.Dot) -> str:
1097        this = expression.this
1098
1099        if not this.type:
1100            from sqlglot.optimizer.annotate_types import annotate_types
1101
1102            this = annotate_types(this, dialect=self.dialect)
1103
1104        if not isinstance(this, exp.Dot) and this.is_type(exp.DType.STRUCT):
1105            # Generate colon notation for the top level STRUCT
1106            return f"{self.sql(this)}:{self.sql(expression, 'expression')}"
1107
1108        return super().dot_sql(expression)
1109
1110    def modelattribute_sql(self, expression: exp.ModelAttribute) -> str:
1111        return f"{self.sql(expression, 'this')}!{self.sql(expression, 'expression')}"
1112
1113    def format_sql(self, expression: exp.Format) -> str:
1114        if expression.name.lower() == "%s" and len(expression.expressions) == 1:
1115            return self.func("TO_CHAR", expression.expressions[0])
1116
1117        return self.function_fallback_sql(expression)
1118
1119    def splitpart_sql(self, expression: exp.SplitPart) -> str:
1120        # Set part_index to 1 if missing
1121        if not expression.args.get("delimiter"):
1122            expression.set("delimiter", exp.Literal.string(" "))
1123
1124        if not expression.args.get("part_index"):
1125            expression.set("part_index", exp.Literal.number(1))
1126
1127        return rename_func("SPLIT_PART")(self, expression)
1128
1129    def uniform_sql(self, expression: exp.Uniform) -> str:
1130        gen = expression.args.get("gen")
1131        seed = expression.args.get("seed")
1132
1133        # From Databricks UNIFORM(min, max, seed) -> Wrap gen in RANDOM(seed)
1134        if seed:
1135            gen = exp.Rand(this=seed)
1136
1137        # No gen argument (from Databricks 2-arg UNIFORM(min, max)) -> Add RANDOM()
1138        if not gen:
1139            gen = exp.Rand()
1140
1141        return self.func("UNIFORM", expression.this, expression.expression, gen)
1142
    def window_sql(self, expression: exp.Window) -> str:
        """Generate a window clause, eliding the default frame for ranking functions."""
        spec = expression.args.get("spec")
        this = expression.this

        # Ranking functions (possibly wrapped in RESPECT/IGNORE NULLS) with the
        # full default frame ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED
        # FOLLOWING render identically without the frame, so it is dropped.
        if (
            (
                isinstance(this, RANKING_WINDOW_FUNCTIONS_WITH_FRAME)
                or (
                    isinstance(this, (exp.RespectNulls, exp.IgnoreNulls))
                    and isinstance(this.this, RANKING_WINDOW_FUNCTIONS_WITH_FRAME)
                )
            )
            and spec
            and (
                spec.text("kind").upper() == "ROWS"
                and spec.text("start").upper() == "UNBOUNDED"
                and spec.text("start_side").upper() == "PRECEDING"
                and spec.text("end").upper() == "UNBOUNDED"
                and spec.text("end_side").upper() == "FOLLOWING"
            )
        ):
            # omit the default window from window ranking functions
            expression.set("spec", None)
        return super().window_sql(expression)
class SnowflakeGenerator(sqlglot.generator.Generator):
 376class SnowflakeGenerator(generator.Generator):
 377    SELECT_KINDS: tuple[str, ...] = ()
 378    PARAMETER_TOKEN = "$"
 379    MATCHED_BY_SOURCE = False
 380    SINGLE_STRING_INTERVAL = True
 381    JOIN_HINTS = False
 382    TABLE_HINTS = False
 383    QUERY_HINTS = False
 384    AGGREGATE_FILTER_SUPPORTED = False
 385    SUPPORTS_TABLE_COPY = False
 386    COLLATE_IS_FUNC = True
 387    LIMIT_ONLY_LITERALS = True
 388    JSON_KEY_VALUE_PAIR_SEP = ","
 389    INSERT_OVERWRITE = " OVERWRITE INTO"
 390    STRUCT_DELIMITER = ("(", ")")
 391    COPY_PARAMS_ARE_WRAPPED = False
 392    COPY_PARAMS_EQ_REQUIRED = True
 393    STAR_EXCEPT = "EXCLUDE"
 394    SUPPORTS_EXPLODING_PROJECTIONS = False
 395    ARRAY_CONCAT_IS_VAR_LEN = False
 396    SUPPORTS_CONVERT_TIMEZONE = True
 397    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
 398    SUPPORTS_MEDIAN = True
 399    ARRAY_SIZE_NAME = "ARRAY_SIZE"
 400    SUPPORTS_DECODE_CASE = True
 401
 402    AFTER_HAVING_MODIFIER_TRANSFORMS = generator.AFTER_HAVING_MODIFIER_TRANSFORMS
 403
 404    IS_BOOL_ALLOWED = False
 405    DIRECTED_JOINS = True
 406    SUPPORTS_UESCAPE = False
 407    TRY_SUPPORTED = False
 408
 409    TRANSFORMS = {
 410        **generator.Generator.TRANSFORMS,
 411        exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
 412        exp.ArgMax: rename_func("MAX_BY"),
 413        exp.ArgMin: rename_func("MIN_BY"),
 414        exp.Array: transforms.preprocess([transforms.inherit_struct_field_names]),
 415        exp.ArrayConcat: array_concat_sql("ARRAY_CAT"),
 416        exp.ArrayAppend: array_append_sql("ARRAY_APPEND"),
 417        exp.ArrayPrepend: array_append_sql("ARRAY_PREPEND"),
 418        exp.ArrayContains: lambda self, e: self.func(
 419            "ARRAY_CONTAINS",
 420            e.expression
 421            if e.args.get("ensure_variant") is False
 422            else exp.cast(e.expression, exp.DType.VARIANT, copy=False),
 423            e.this,
 424        ),
 425        exp.ArrayPosition: lambda self, e: self.func(
 426            "ARRAY_POSITION",
 427            e.expression,
 428            e.this,
 429        ),
 430        exp.ArrayIntersect: rename_func("ARRAY_INTERSECTION"),
 431        exp.ArrayOverlaps: rename_func("ARRAYS_OVERLAP"),
 432        exp.AtTimeZone: lambda self, e: self.func("CONVERT_TIMEZONE", e.args.get("zone"), e.this),
 433        exp.BitwiseOr: rename_func("BITOR"),
 434        exp.BitwiseXor: rename_func("BITXOR"),
 435        exp.BitwiseAnd: rename_func("BITAND"),
 436        exp.BitwiseAndAgg: rename_func("BITANDAGG"),
 437        exp.BitwiseOrAgg: rename_func("BITORAGG"),
 438        exp.BitwiseXorAgg: rename_func("BITXORAGG"),
 439        exp.BitwiseNot: rename_func("BITNOT"),
 440        exp.BitwiseLeftShift: rename_func("BITSHIFTLEFT"),
 441        exp.BitwiseRightShift: rename_func("BITSHIFTRIGHT"),
 442        exp.Create: transforms.preprocess([_flatten_structured_types_unless_iceberg]),
 443        exp.CurrentTimestamp: lambda self, e: (
 444            self.func("SYSDATE") if e.args.get("sysdate") else self.function_fallback_sql(e)
 445        ),
 446        exp.CurrentSchemas: lambda self, e: self.func("CURRENT_SCHEMAS"),
 447        exp.Localtime: lambda self, e: (
 448            self.func("CURRENT_TIME", e.this) if e.this else "CURRENT_TIME"
 449        ),
 450        exp.Localtimestamp: lambda self, e: (
 451            self.func("CURRENT_TIMESTAMP", e.this) if e.this else "CURRENT_TIMESTAMP"
 452        ),
 453        exp.DateAdd: date_delta_sql("DATEADD"),
 454        exp.DateDiff: date_delta_sql("DATEDIFF"),
 455        exp.DatetimeAdd: date_delta_sql("TIMESTAMPADD"),
 456        exp.DatetimeDiff: timestampdiff_sql,
 457        exp.DateStrToDate: datestrtodate_sql,
 458        exp.Decrypt: lambda self, e: self.func(
 459            f"{'TRY_' if e.args.get('safe') else ''}DECRYPT",
 460            e.this,
 461            e.args.get("passphrase"),
 462            e.args.get("aad"),
 463            e.args.get("encryption_method"),
 464        ),
 465        exp.DecryptRaw: lambda self, e: self.func(
 466            f"{'TRY_' if e.args.get('safe') else ''}DECRYPT_RAW",
 467            e.this,
 468            e.args.get("key"),
 469            e.args.get("iv"),
 470            e.args.get("aad"),
 471            e.args.get("encryption_method"),
 472            e.args.get("aead"),
 473        ),
 474        exp.DayOfMonth: rename_func("DAYOFMONTH"),
 475        exp.DayOfWeek: rename_func("DAYOFWEEK"),
 476        exp.DayOfWeekIso: rename_func("DAYOFWEEKISO"),
 477        exp.DayOfYear: rename_func("DAYOFYEAR"),
 478        exp.DotProduct: rename_func("VECTOR_INNER_PRODUCT"),
 479        exp.Explode: rename_func("FLATTEN"),
 480        exp.Extract: lambda self, e: self.func(
 481            "DATE_PART", map_date_part(e.this, self.dialect), e.expression
 482        ),
 483        exp.CosineDistance: rename_func("VECTOR_COSINE_SIMILARITY"),
 484        exp.EuclideanDistance: rename_func("VECTOR_L2_DISTANCE"),
 485        exp.HandlerProperty: lambda self, e: f"HANDLER = {self.sql(e, 'this')}",
 486        exp.FileFormatProperty: lambda self, e: (
 487            f"FILE_FORMAT=({self.expressions(e, 'expressions', sep=' ')})"
 488        ),
 489        exp.FromTimeZone: lambda self, e: self.func(
 490            "CONVERT_TIMEZONE", e.args.get("zone"), "'UTC'", e.this
 491        ),
 492        exp.GenerateSeries: lambda self, e: self.func(
 493            "ARRAY_GENERATE_RANGE",
 494            e.args["start"],
 495            e.args["end"] if e.args.get("is_end_exclusive") else e.args["end"] + 1,
 496            e.args.get("step"),
 497        ),
 498        exp.GetExtract: rename_func("GET"),
 499        exp.GroupConcat: lambda self, e: groupconcat_sql(self, e, sep=""),
 500        exp.If: if_sql(name="IFF", false_value="NULL"),
 501        exp.JSONArray: lambda self, e: self.func(
 502            "TO_VARIANT", self.func("ARRAY_CONSTRUCT", *e.expressions)
 503        ),
 504        exp.JSONExtractArray: _json_extract_value_array_sql,
 505        exp.JSONExtractScalar: lambda self, e: self.func(
 506            "JSON_EXTRACT_PATH_TEXT", e.this, e.expression
 507        ),
 508        exp.JSONKeys: rename_func("OBJECT_KEYS"),
 509        exp.JSONObject: lambda self, e: self.func("OBJECT_CONSTRUCT_KEEP_NULL", *e.expressions),
 510        exp.JSONPathRoot: lambda *_: "",
 511        exp.JSONValueArray: _json_extract_value_array_sql,
 512        exp.Levenshtein: unsupported_args("ins_cost", "del_cost", "sub_cost")(
 513            rename_func("EDITDISTANCE")
 514        ),
 515        exp.LocationProperty: lambda self, e: f"LOCATION={self.sql(e, 'this')}",
 516        exp.LogicalAnd: rename_func("BOOLAND_AGG"),
 517        exp.LogicalOr: rename_func("BOOLOR_AGG"),
 518        exp.Map: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
 519        exp.ManhattanDistance: rename_func("VECTOR_L1_DISTANCE"),
 520        exp.MakeInterval: no_make_interval_sql,
 521        exp.Max: max_or_greatest,
 522        exp.Min: min_or_least,
 523        exp.ParseJSON: lambda self, e: self.func(
 524            f"{'TRY_' if e.args.get('safe') else ''}PARSE_JSON", e.this
 525        ),
 526        exp.ToBinary: lambda self, e: self.func(
 527            f"{'TRY_' if e.args.get('safe') else ''}TO_BINARY", e.this, e.args.get("format")
 528        ),
 529        exp.ToBoolean: lambda self, e: self.func(
 530            f"{'TRY_' if e.args.get('safe') else ''}TO_BOOLEAN", e.this
 531        ),
 532        exp.ToDouble: lambda self, e: self.func(
 533            f"{'TRY_' if e.args.get('safe') else ''}TO_DOUBLE", e.this, e.args.get("format")
 534        ),
 535        exp.ToFile: lambda self, e: self.func(
 536            f"{'TRY_' if e.args.get('safe') else ''}TO_FILE", e.this, e.args.get("path")
 537        ),
 538        exp.JSONFormat: rename_func("TO_JSON"),
 539        exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
 540        exp.PercentileCont: transforms.preprocess([transforms.add_within_group_for_percentiles]),
 541        exp.PercentileDisc: transforms.preprocess([transforms.add_within_group_for_percentiles]),
 542        exp.Pivot: transforms.preprocess([_unqualify_pivot_columns]),
 543        exp.RegexpExtract: _regexpextract_sql,
 544        exp.RegexpExtractAll: _regexpextract_sql,
 545        exp.RegexpILike: _regexpilike_sql,
 546        exp.RowAccessProperty: lambda self, e: self.rowaccessproperty_sql(e),
 547        exp.Select: transforms.preprocess(
 548            [
 549                transforms.eliminate_window_clause,
 550                transforms.eliminate_distinct_on,
 551                transforms.explode_projection_to_unnest(),
 552                transforms.eliminate_semi_and_anti_joins,
 553                _transform_generate_date_array,
 554                _qualify_unnested_columns,
 555                _eliminate_dot_variant_lookup,
 556            ]
 557        ),
 558        exp.SHA: rename_func("SHA1"),
 559        exp.SHA1Digest: rename_func("SHA1_BINARY"),
 560        exp.MD5Digest: rename_func("MD5_BINARY"),
 561        exp.MD5NumberLower64: rename_func("MD5_NUMBER_LOWER64"),
 562        exp.MD5NumberUpper64: rename_func("MD5_NUMBER_UPPER64"),
 563        exp.Hex: rename_func("HEX_ENCODE"),
 564        exp.LowerHex: rename_func("TO_CHAR"),
 565        exp.Skewness: rename_func("SKEW"),
 566        exp.StarMap: rename_func("OBJECT_CONSTRUCT"),
 567        exp.StartsWith: rename_func("STARTSWITH"),
 568        exp.EndsWith: rename_func("ENDSWITH"),
 569        exp.Rand: lambda self, e: self.func("RANDOM", e.this),
 570        exp.StrPosition: lambda self, e: strposition_sql(
 571            self, e, func_name="CHARINDEX", supports_position=True
 572        ),
 573        exp.StrToDate: lambda self, e: self.func("DATE", e.this, self.format_time(e)),
 574        exp.StringToArray: rename_func("STRTOK_TO_ARRAY"),
 575        exp.StrtokToArray: rename_func("STRTOK_TO_ARRAY"),
 576        exp.Stuff: rename_func("INSERT"),
 577        exp.StPoint: rename_func("ST_MAKEPOINT"),
 578        exp.TimeAdd: date_delta_sql("TIMEADD"),
 579        exp.TimeSlice: lambda self, e: self.func(
 580            "TIME_SLICE",
 581            e.this,
 582            e.expression,
 583            unit_to_str(e),
 584            e.args.get("kind"),
 585        ),
 586        exp.Timestamp: no_timestamp_sql,
 587        exp.TimestampAdd: date_delta_sql("TIMESTAMPADD"),
 588        exp.TimestampDiff: lambda self, e: self.func("TIMESTAMPDIFF", e.unit, e.expression, e.this),
 589        exp.TimestampTrunc: timestamptrunc_sql(),
 590        exp.TimeStrToTime: timestrtotime_sql,
 591        exp.TimeToUnix: lambda self, e: f"EXTRACT(epoch_second FROM {self.sql(e, 'this')})",
 592        exp.ToArray: rename_func("TO_ARRAY"),
 593        exp.ToChar: lambda self, e: self.function_fallback_sql(e),
 594        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
 595        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
 596        exp.TsOrDsToDate: lambda self, e: self.func(
 597            f"{'TRY_' if e.args.get('safe') else ''}TO_DATE", e.this, self.format_time(e)
 598        ),
 599        exp.TsOrDsToTime: lambda self, e: self.func(
 600            f"{'TRY_' if e.args.get('safe') else ''}TO_TIME", e.this, self.format_time(e)
 601        ),
 602        exp.Unhex: rename_func("HEX_DECODE_BINARY"),
 603        exp.UnixToTime: lambda self, e: self.func("TO_TIMESTAMP", e.this, e.args.get("scale")),
 604        exp.Uuid: rename_func("UUID_STRING"),
 605        exp.VarMap: lambda self, e: var_map_sql(self, e, "OBJECT_CONSTRUCT"),
 606        exp.Booland: rename_func("BOOLAND"),
 607        exp.Boolor: rename_func("BOOLOR"),
 608        exp.WeekOfYear: rename_func("WEEKISO"),
 609        exp.YearOfWeek: rename_func("YEAROFWEEK"),
 610        exp.YearOfWeekIso: rename_func("YEAROFWEEKISO"),
 611        exp.Xor: rename_func("BOOLXOR"),
 612        exp.ByteLength: rename_func("OCTET_LENGTH"),
 613        exp.Flatten: rename_func("ARRAY_FLATTEN"),
 614        exp.ArrayConcatAgg: lambda self, e: self.func("ARRAY_FLATTEN", exp.ArrayAgg(this=e.this)),
 615        exp.SHA2Digest: lambda self, e: self.func(
 616            "SHA2_BINARY", e.this, e.args.get("length") or exp.Literal.number(256)
 617        ),
 618    }
 619
 620    def sortarray_sql(self, expression: exp.SortArray) -> str:
 621        asc = expression.args.get("asc")
 622        nulls_first = expression.args.get("nulls_first")
 623        if asc == exp.false() and nulls_first == exp.true():
 624            nulls_first = None
 625        return self.func("ARRAY_SORT", expression.this, asc, nulls_first)
 626
 627    def nthvalue_sql(self, expression: exp.NthValue) -> str:
 628        result = self.func("NTH_VALUE", expression.this, expression.args.get("offset"))
 629
 630        from_first = expression.args.get("from_first")
 631
 632        if from_first is not None:
 633            if from_first:
 634                result = result + " FROM FIRST"
 635            else:
 636                result = result + " FROM LAST"
 637
 638        return result
 639
    # JSON path node types this generator can render; other parts fall back
    # to the base generator's unsupported handling.
    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    # Snowflake-specific type spellings layered over the base mapping
    # (e.g. JSON is stored as VARIANT, structs as OBJECT).
    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DType.BIGDECIMAL: "DOUBLE",
        exp.DType.JSON: "VARIANT",
        exp.DType.NESTED: "OBJECT",
        exp.DType.STRUCT: "OBJECT",
        exp.DType.TEXT: "VARCHAR",
    }

    # Token spellings that differ from the default generator output.
    TOKEN_MAPPING = {
        TokenType.AUTO_INCREMENT: "AUTOINCREMENT",
    }

    # Placement of properties within CREATE statements; UNSUPPORTED entries
    # are dropped with a warning.
    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.CredentialsProperty: exp.Properties.Location.POST_WITH,
        exp.LocationProperty: exp.Properties.Location.POST_WITH,
        exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
        exp.RowAccessProperty: exp.Properties.Location.POST_SCHEMA,
        exp.SetProperty: exp.Properties.Location.UNSUPPORTED,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    # Expressions that force VALUES to be rendered without the table form
    # (consumed by values_sql below).
    UNSUPPORTED_VALUES_EXPRESSIONS = {
        exp.Map,
        exp.StarMap,
        exp.Struct,
        exp.VarMap,
    }

    # Functions for which RESPECT/IGNORE NULLS cannot be generated.
    RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS = (exp.ArrayAgg,)
 677
 678    def with_properties(self, properties: exp.Properties) -> str:
 679        return self.properties(properties, wrapped=False, prefix=self.sep(""), sep=" ")
 680
 681    def values_sql(self, expression: exp.Values, values_as_table: bool = True) -> str:
 682        if expression.find(*self.UNSUPPORTED_VALUES_EXPRESSIONS):
 683            values_as_table = False
 684
 685        return super().values_sql(expression, values_as_table=values_as_table)
 686
    def datatype_sql(self, expression: exp.DataType) -> str:
        """Generate a type name with Snowflake-specific adjustments.

        - DOUBLE nested inside VECTOR is rendered as FLOAT, since VECTOR only
          accepts FLOAT (not its synonym DOUBLE), INT, and STRING elements.
        - Struct types whose fields are bare DataTypes collapse to plain OBJECT.
        - String keys in typed OBJECT fields are rewritten to quoted identifiers.
        """
        # Check if this is a FLOAT type nested inside a VECTOR type
        # VECTOR only accepts FLOAT (not DOUBLE), INT, and STRING as element types
        # https://docs.snowflake.com/en/sql-reference/data-types-vector
        if expression.is_type(exp.DType.DOUBLE):
            parent = expression.parent
            if isinstance(parent, exp.DataType) and parent.is_type(exp.DType.VECTOR):
                # Preserve FLOAT for VECTOR types instead of mapping to synonym DOUBLE
                return "FLOAT"

        expressions = expression.expressions
        if expressions and expression.is_type(*exp.DataType.STRUCT_TYPES):
            for field_type in expressions:
                # The correct syntax is OBJECT [ (<key> <value_type [NOT NULL] [, ...]) ]
                if isinstance(field_type, exp.DataType):
                    return "OBJECT"
                if (
                    isinstance(field_type, exp.ColumnDef)
                    and field_type.this
                    and field_type.this.is_string
                ):
                    # Doing OBJECT('foo' VARCHAR) is invalid snowflake Syntax. Moreover, besides
                    # converting 'foo' into an identifier, we also need to quote it because these
                    # keys are case-sensitive. For example:
                    #
                    # WITH t AS (SELECT OBJECT_CONSTRUCT('x', 'y') AS c) SELECT c:x FROM t -- correct
                    # WITH t AS (SELECT OBJECT_CONSTRUCT('x', 'y') AS c) SELECT c:X FROM t -- incorrect, returns NULL
                    field_type.this.replace(exp.to_identifier(field_type.name, quoted=True))

        return super().datatype_sql(expression)
 717
 718    def tonumber_sql(self, expression: exp.ToNumber) -> str:
 719        precision = expression.args.get("precision")
 720        scale = expression.args.get("scale")
 721
 722        default_precision = isinstance(precision, exp.Literal) and precision.name == "38"
 723        default_scale = isinstance(scale, exp.Literal) and scale.name == "0"
 724
 725        if default_precision and default_scale:
 726            precision = None
 727            scale = None
 728        elif default_scale:
 729            scale = None
 730
 731        func_name = "TRY_TO_NUMBER" if expression.args.get("safe") else "TO_NUMBER"
 732
 733        return self.func(
 734            func_name,
 735            expression.this,
 736            expression.args.get("format"),
 737            precision,
 738            scale,
 739        )
 740
 741    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
 742        milli = expression.args.get("milli")
 743        if milli is not None:
 744            milli_to_nano = milli.pop() * exp.Literal.number(1000000)
 745            expression.set("nano", milli_to_nano)
 746
 747        return rename_func("TIMESTAMP_FROM_PARTS")(self, expression)
 748
 749    def cast_sql(self, expression: exp.Cast, safe_prefix: str | None = None) -> str:
 750        if expression.is_type(exp.DType.GEOGRAPHY):
 751            return self.func("TO_GEOGRAPHY", expression.this)
 752        if expression.is_type(exp.DType.GEOMETRY):
 753            return self.func("TO_GEOMETRY", expression.this)
 754
 755        return super().cast_sql(expression, safe_prefix=safe_prefix)
 756
 757    def trycast_sql(self, expression: exp.TryCast) -> str:
 758        value = expression.this
 759
 760        if value.type is None:
 761            from sqlglot.optimizer.annotate_types import annotate_types
 762
 763            value = annotate_types(value, dialect=self.dialect)
 764
 765        # Snowflake requires that TRY_CAST's value be a string
 766        # If TRY_CAST is being roundtripped (since Snowflake is the only dialect that sets "requires_string") or
 767        # if we can deduce that the value is a string, then we can generate TRY_CAST
 768        if expression.args.get("requires_string") or value.is_type(*exp.DataType.TEXT_TYPES):
 769            return super().trycast_sql(expression)
 770
 771        return self.cast_sql(expression)
 772
 773    def log_sql(self, expression: exp.Log) -> str:
 774        if not expression.expression:
 775            return self.func("LN", expression.this)
 776
 777        return super().log_sql(expression)
 778
 779    def greatest_sql(self, expression: exp.Greatest) -> str:
 780        name = "GREATEST_IGNORE_NULLS" if expression.args.get("ignore_nulls") else "GREATEST"
 781        return self.func(name, expression.this, *expression.expressions)
 782
 783    def least_sql(self, expression: exp.Least) -> str:
 784        name = "LEAST_IGNORE_NULLS" if expression.args.get("ignore_nulls") else "LEAST"
 785        return self.func(name, expression.this, *expression.expressions)
 786
 787    def generator_sql(self, expression: exp.Generator) -> str:
 788        args = []
 789        rowcount = expression.args.get("rowcount")
 790        timelimit = expression.args.get("timelimit")
 791
 792        if rowcount:
 793            args.append(exp.Kwarg(this=exp.var("ROWCOUNT"), expression=rowcount))
 794        if timelimit:
 795            args.append(exp.Kwarg(this=exp.var("TIMELIMIT"), expression=timelimit))
 796
 797        return self.func("GENERATOR", *args)
 798
    def unnest_sql(self, expression: exp.Unnest) -> str:
        """Transpile UNNEST into Snowflake's [TABLE(]FLATTEN(INPUT => ...)[)].

        The alias is rewritten to cover all of FLATTEN's output columns
        (seq, key, path, index, value, this): a user-provided alias column is
        mapped onto the value position, and a requested offset onto index.
        """
        unnest_alias = expression.args.get("alias")
        offset = expression.args.get("offset")

        unnest_alias_columns = unnest_alias.columns if unnest_alias else []
        value = seq_get(unnest_alias_columns, 0) or exp.to_identifier("value")

        columns = [
            exp.to_identifier("seq"),
            exp.to_identifier("key"),
            exp.to_identifier("path"),
            # A requested offset takes FLATTEN's index column slot.
            offset.pop() if isinstance(offset, exp.Expr) else exp.to_identifier("index"),
            value,
            exp.to_identifier("this"),
        ]

        if unnest_alias:
            unnest_alias.set("columns", columns)
        else:
            unnest_alias = exp.TableAlias(this="_u", columns=columns)

        table_input = self.sql(expression.expressions[0])
        if not table_input.startswith("INPUT =>"):
            table_input = f"INPUT => {table_input}"

        expression_parent = expression.parent

        # Under LATERAL, FLATTEN is generated without the TABLE() wrapper.
        explode = (
            f"FLATTEN({table_input})"
            if isinstance(expression_parent, exp.Lateral)
            else f"TABLE(FLATTEN({table_input}))"
        )
        alias = self.sql(unnest_alias)
        alias = f" AS {alias}" if alias else ""
        # When not already a table source, expose the value via "<value> FROM ...".
        value = (
            ""
            if isinstance(expression_parent, (exp.From, exp.Join, exp.Lateral))
            else f"{value} FROM "
        )

        return f"{value}{explode}{alias}"
 840
    def show_sql(self, expression: exp.Show) -> str:
        """Render a Snowflake SHOW command with its optional modifiers.

        Clause order in the output: TERSE/ICEBERG prefix the object name, then
        HISTORY, LIKE, IN <scope_kind>, <scope>, STARTS WITH, LIMIT, FROM,
        and finally WITH PRIVILEGES.
        """
        terse = "TERSE " if expression.args.get("terse") else ""
        iceberg = "ICEBERG " if expression.args.get("iceberg") else ""
        history = " HISTORY" if expression.args.get("history") else ""
        like = self.sql(expression, "like")
        like = f" LIKE {like}" if like else ""

        scope = self.sql(expression, "scope")
        scope = f" {scope}" if scope else ""

        scope_kind = self.sql(expression, "scope_kind")
        if scope_kind:
            scope_kind = f" IN {scope_kind}"

        starts_with = self.sql(expression, "starts_with")
        if starts_with:
            starts_with = f" STARTS WITH {starts_with}"

        limit = self.sql(expression, "limit")

        from_ = self.sql(expression, "from_")
        if from_:
            from_ = f" FROM {from_}"

        privileges = self.expressions(expression, key="privileges", flat=True)
        privileges = f" WITH PRIVILEGES {privileges}" if privileges else ""

        return f"SHOW {terse}{iceberg}{expression.name}{history}{like}{scope_kind}{scope}{starts_with}{limit}{from_}{privileges}"
 869
 870    def rowaccessproperty_sql(self, expression: exp.RowAccessProperty) -> str:
 871        if not expression.this:
 872            return "ROW ACCESS"
 873        on = f" ON ({self.expressions(expression, flat=True)})" if expression.expressions else ""
 874        return f"WITH ROW ACCESS POLICY {self.sql(expression, 'this')}{on}"
 875
 876    def describe_sql(self, expression: exp.Describe) -> str:
 877        kind_value = expression.args.get("kind") or "TABLE"
 878
 879        properties = expression.args.get("properties")
 880        if properties:
 881            qualifier = self.expressions(properties, sep=" ")
 882            kind = f" {qualifier} {kind_value}"
 883        else:
 884            kind = f" {kind_value}"
 885
 886        this = f" {self.sql(expression, 'this')}"
 887        expressions = self.expressions(expression, flat=True)
 888        expressions = f" {expressions}" if expressions else ""
 889        return f"DESCRIBE{kind}{this}{expressions}"
 890
 891    def generatedasidentitycolumnconstraint_sql(
 892        self, expression: exp.GeneratedAsIdentityColumnConstraint
 893    ) -> str:
 894        start = expression.args.get("start")
 895        start = f" START {start}" if start else ""
 896        increment = expression.args.get("increment")
 897        increment = f" INCREMENT {increment}" if increment else ""
 898
 899        order = expression.args.get("order")
 900        if order is not None:
 901            order_clause = " ORDER" if order else " NOORDER"
 902        else:
 903            order_clause = ""
 904
 905        return f"AUTOINCREMENT{start}{increment}{order_clause}"
 906
 907    def cluster_sql(self, expression: exp.Cluster) -> str:
 908        return f"CLUSTER BY ({self.expressions(expression, flat=True)})"
 909
 910    def struct_sql(self, expression: exp.Struct) -> str:
 911        if len(expression.expressions) == 1:
 912            arg = expression.expressions[0]
 913            if arg.is_star or (isinstance(arg, exp.ILike) and arg.left.is_star):
 914                # Wildcard syntax: https://docs.snowflake.com/en/sql-reference/data-types-semistructured#object
 915                return f"{{{self.sql(expression.expressions[0])}}}"
 916
 917        keys = []
 918        values = []
 919
 920        for i, e in enumerate(expression.expressions):
 921            if isinstance(e, exp.PropertyEQ):
 922                keys.append(
 923                    exp.Literal.string(e.name) if isinstance(e.this, exp.Identifier) else e.this
 924                )
 925                values.append(e.expression)
 926            else:
 927                keys.append(exp.Literal.string(f"_{i}"))
 928                values.append(e)
 929
 930        return self.func("OBJECT_CONSTRUCT", *flatten(zip(keys, values)))
 931
    @unsupported_args("weight", "accuracy")
    def approxquantile_sql(self, expression: exp.ApproxQuantile) -> str:
        """Map APPROX_QUANTILE to APPROX_PERCENTILE; weight/accuracy args are unsupported."""
        return self.func("APPROX_PERCENTILE", expression.this, expression.args.get("quantile"))
 935
    def alterset_sql(self, expression: exp.AlterSet) -> str:
        """Render an ALTER ... SET clause, including stage file format,
        stage copy options, and tag assignments."""
        exprs = self.expressions(expression, flat=True)
        exprs = f" {exprs}" if exprs else ""
        file_format = self.expressions(expression, key="file_format", flat=True, sep=" ")
        file_format = f" STAGE_FILE_FORMAT = ({file_format})" if file_format else ""
        copy_options = self.expressions(expression, key="copy_options", flat=True, sep=" ")
        copy_options = f" STAGE_COPY_OPTIONS = ({copy_options})" if copy_options else ""
        tag = self.expressions(expression, key="tag", flat=True)
        tag = f" TAG {tag}" if tag else ""

        return f"SET{exprs}{file_format}{copy_options}{tag}"
 947
    def strtotime_sql(self, expression: exp.StrToTime) -> str:
        """Generate [TRY_]TO_TIMESTAMP (or a variant picked from TIMESTAMP_TYPES)
        based on the target type or the expression's annotated type."""
        # target_type is stored as a DataType instance
        target_type = expression.args.get("target_type")

        # Get the type enum from DataType instance or from type annotation
        if isinstance(target_type, exp.DataType):
            type_enum = target_type.this
        elif expression.type:
            type_enum = expression.type.this
        else:
            # No type information available; default to a plain timestamp.
            type_enum = exp.DType.TIMESTAMP

        func_name = TIMESTAMP_TYPES.get(type_enum, "TO_TIMESTAMP")

        return self.func(
            f"{'TRY_' if expression.args.get('safe') else ''}{func_name}",
            expression.this,
            self.format_time(expression),
        )
 967
    def timestampsub_sql(self, expression: exp.TimestampSub) -> str:
        """Transpile timestamp subtraction to TIMESTAMPADD with a negated amount."""
        return self.sql(
            exp.TimestampAdd(
                this=expression.this,
                expression=expression.expression * -1,
                unit=expression.unit,
            )
        )
 976
    def jsonextract_sql(self, expression: exp.JSONExtract) -> str:
        """Generate GET_PATH(json, path), parsing raw JSON strings first."""
        this = expression.this

        # JSON strings are valid coming from other dialects such as BQ so
        # for these cases we PARSE_JSON preemptively
        if not isinstance(this, (exp.ParseJSON, exp.JSONExtract)) and not expression.args.get(
            "requires_json"
        ):
            this = exp.ParseJSON(this=this)

        return self.func(
            "GET_PATH",
            this,
            expression.expression,
        )
 992
 993    def timetostr_sql(self, expression: exp.TimeToStr) -> str:
 994        this = expression.this
 995        if this.is_string:
 996            this = exp.cast(this, exp.DType.TIMESTAMP)
 997
 998        return self.func("TO_CHAR", this, self.format_time(expression))
 999
1000    def datesub_sql(self, expression: exp.DateSub) -> str:
1001        value = expression.expression
1002        if value:
1003            value.replace(value * (-1))
1004        else:
1005            self.unsupported("DateSub cannot be transpiled if the subtracted count is unknown")
1006
1007        return date_delta_sql("DATEADD")(self, expression)
1008
1009    def select_sql(self, expression: exp.Select) -> str:
1010        limit = expression.args.get("limit")
1011        offset = expression.args.get("offset")
1012        if offset and not limit:
1013            expression.limit(exp.Null(), copy=False)
1014        return super().select_sql(expression)
1015
    def createable_sql(self, expression: exp.Create, locations: defaultdict) -> str:
        """Render the created object's name and schema, repositioning COPY GRANTS.

        For materialized views the COPY GRANTS property is pulled out of the
        POST_SCHEMA bucket and emitted between the view name and its column
        list; everything else defers to the base implementation.
        """
        is_materialized = expression.find(exp.MaterializedProperty)
        copy_grants_property = expression.find(exp.CopyGrantsProperty)

        if expression.kind == "VIEW" and is_materialized and copy_grants_property:
            # For materialized views, COPY GRANTS is located *before* the columns list
            # This is in contrast to normal views where COPY GRANTS is located *after* the columns list
            # We default CopyGrantsProperty to POST_SCHEMA which means we need to output it POST_NAME if a materialized view is detected
            # ref: https://docs.snowflake.com/en/sql-reference/sql/create-materialized-view#syntax
            # ref: https://docs.snowflake.com/en/sql-reference/sql/create-view#syntax
            post_schema_properties = locations[exp.Properties.Location.POST_SCHEMA]
            post_schema_properties.pop(post_schema_properties.index(copy_grants_property))

            this_name = self.sql(expression.this, "this")
            copy_grants = self.sql(copy_grants_property)
            this_schema = self.schema_columns_sql(expression.this)
            this_schema = f"{self.sep()}{this_schema}" if this_schema else ""

            return f"{this_name}{self.sep()}{copy_grants}{this_schema}"

        return super().createable_sql(expression, locations)
1037
    def arrayagg_sql(self, expression: exp.ArrayAgg) -> str:
        """Generate ARRAY_AGG, moving an embedded ORDER BY into WITHIN GROUP."""
        this = expression.this

        # If an ORDER BY clause is present, we need to remove it from ARRAY_AGG
        # and add it later as part of the WITHIN GROUP clause
        order = this if isinstance(this, exp.Order) else None
        if order:
            # Detach the ordered operand and aggregate it directly.
            expression.set("this", order.this.pop())

        expr_sql = super().arrayagg_sql(expression)

        if order:
            # Re-attach the ordering as ... WITHIN GROUP (ORDER BY ...).
            expr_sql = self.sql(exp.WithinGroup(this=expr_sql, expression=order))

        return expr_sql
1053
1054    def arraydistinct_sql(self, expression: exp.ArrayDistinct) -> str:
1055        if expression.args.get("check_null"):
1056            return self.func("ARRAY_DISTINCT", expression.this)
1057        return self.func("ARRAY_DISTINCT", exp.ArrayCompact(this=expression.this))
1058
    def arraytostring_sql(self, expression: exp.ArrayToString) -> str:
        """Render ARRAY_TO_STRING(array, separator)."""
        return self.func("ARRAY_TO_STRING", expression.this, expression.expression)
1061
    def array_sql(self, expression: exp.Array) -> str:
        """Generate an array literal, rewriting ARRAY(SELECT AS STRUCT ...).

        A SELECT AS STRUCT subquery is rewritten into a subquery that builds
        OBJECT_CONSTRUCT rows and aggregates them with ARRAY_AGG; any other
        input is rendered as an inline array.
        """
        expressions = expression.expressions

        first_expr = seq_get(expressions, 0)
        if isinstance(first_expr, exp.Select):
            # SELECT AS STRUCT foo AS alias_foo -> ARRAY_AGG(OBJECT_CONSTRUCT('alias_foo', foo))
            if first_expr.text("kind").upper() == "STRUCT":
                object_construct_args = []
                for expr in first_expr.expressions:
                    # Alias case: SELECT AS STRUCT foo AS alias_foo -> OBJECT_CONSTRUCT('alias_foo', foo)
                    # Column case: SELECT AS STRUCT foo -> OBJECT_CONSTRUCT('foo', foo)
                    name = expr.this if isinstance(expr, exp.Alias) else expr

                    object_construct_args.extend([exp.Literal.string(expr.alias_or_name), name])

                array_agg = exp.ArrayAgg(this=build_object_construct(args=object_construct_args))

                # Replace the struct projections with the single aggregate.
                first_expr.set("kind", None)
                first_expr.set("expressions", [array_agg])

                return self.sql(first_expr.subquery())

        return inline_array_sql(self, expression)
1085
1086    def currentdate_sql(self, expression: exp.CurrentDate) -> str:
1087        zone = self.sql(expression, "this")
1088        if not zone:
1089            return super().currentdate_sql(expression)
1090
1091        expr = exp.Cast(
1092            this=exp.ConvertTimezone(target_tz=zone, timestamp=exp.CurrentTimestamp()),
1093            to=exp.DataType(this=exp.DType.DATE),
1094        )
1095        return self.sql(expr)
1096
1097    def dot_sql(self, expression: exp.Dot) -> str:
1098        this = expression.this
1099
1100        if not this.type:
1101            from sqlglot.optimizer.annotate_types import annotate_types
1102
1103            this = annotate_types(this, dialect=self.dialect)
1104
1105        if not isinstance(this, exp.Dot) and this.is_type(exp.DType.STRUCT):
1106            # Generate colon notation for the top level STRUCT
1107            return f"{self.sql(this)}:{self.sql(expression, 'expression')}"
1108
1109        return super().dot_sql(expression)
1110
    def modelattribute_sql(self, expression: exp.ModelAttribute) -> str:
        """Render Snowflake's model!attribute access syntax."""
        return f"{self.sql(expression, 'this')}!{self.sql(expression, 'expression')}"
1113
1114    def format_sql(self, expression: exp.Format) -> str:
1115        if expression.name.lower() == "%s" and len(expression.expressions) == 1:
1116            return self.func("TO_CHAR", expression.expressions[0])
1117
1118        return self.function_fallback_sql(expression)
1119
    def splitpart_sql(self, expression: exp.SplitPart) -> str:
        """Generate SPLIT_PART, defaulting the delimiter to ' ' and part_index to 1."""
        # Default the delimiter to a single space if missing
        if not expression.args.get("delimiter"):
            expression.set("delimiter", exp.Literal.string(" "))

        # Default part_index to 1 if missing
        if not expression.args.get("part_index"):
            expression.set("part_index", exp.Literal.number(1))

        return rename_func("SPLIT_PART")(self, expression)
1129
1130    def uniform_sql(self, expression: exp.Uniform) -> str:
1131        gen = expression.args.get("gen")
1132        seed = expression.args.get("seed")
1133
1134        # From Databricks UNIFORM(min, max, seed) -> Wrap gen in RANDOM(seed)
1135        if seed:
1136            gen = exp.Rand(this=seed)
1137
1138        # No gen argument (from Databricks 2-arg UNIFORM(min, max)) -> Add RANDOM()
1139        if not gen:
1140            gen = exp.Rand()
1141
1142        return self.func("UNIFORM", expression.this, expression.expression, gen)
1143
    def window_sql(self, expression: exp.Window) -> str:
        """Render a window, dropping the spelled-out default frame from ranking functions.

        For functions in RANKING_WINDOW_FUNCTIONS_WITH_FRAME (possibly wrapped in
        RESPECT/IGNORE NULLS), a frame of ROWS BETWEEN UNBOUNDED PRECEDING AND
        UNBOUNDED FOLLOWING is removed before generation.
        """
        spec = expression.args.get("spec")
        this = expression.this

        if (
            (
                isinstance(this, RANKING_WINDOW_FUNCTIONS_WITH_FRAME)
                or (
                    isinstance(this, (exp.RespectNulls, exp.IgnoreNulls))
                    and isinstance(this.this, RANKING_WINDOW_FUNCTIONS_WITH_FRAME)
                )
            )
            and spec
            and (
                spec.text("kind").upper() == "ROWS"
                and spec.text("start").upper() == "UNBOUNDED"
                and spec.text("start_side").upper() == "PRECEDING"
                and spec.text("end").upper() == "UNBOUNDED"
                and spec.text("end_side").upper() == "FOLLOWING"
            )
        ):
            # omit the default window from window ranking functions
            expression.set("spec", None)
        return super().window_sql(expression)

Generator converts a given syntax tree to the corresponding SQL string.

Arguments:
  • pretty: Whether to format the produced SQL string. Default: False.
  • identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True: Always quote except for special cases. 'safe': Only quote identifiers that are case insensitive.
  • normalize: Whether to normalize identifiers to lowercase. Default: False.
  • pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
  • indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
  • normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
  • unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
  • max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
  • leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
  • max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
  • comments: Whether to preserve comments in the output SQL code. Default: True
SELECT_KINDS: tuple[str, ...] = ()
PARAMETER_TOKEN = '$'
MATCHED_BY_SOURCE = False
SINGLE_STRING_INTERVAL = True
JOIN_HINTS = False
TABLE_HINTS = False
QUERY_HINTS = False
AGGREGATE_FILTER_SUPPORTED = False
SUPPORTS_TABLE_COPY = False
COLLATE_IS_FUNC = True
LIMIT_ONLY_LITERALS = True
JSON_KEY_VALUE_PAIR_SEP = ','
INSERT_OVERWRITE = ' OVERWRITE INTO'
STRUCT_DELIMITER = ('(', ')')
COPY_PARAMS_ARE_WRAPPED = False
COPY_PARAMS_EQ_REQUIRED = True
STAR_EXCEPT = 'EXCLUDE'
SUPPORTS_EXPLODING_PROJECTIONS = False
ARRAY_CONCAT_IS_VAR_LEN = False
SUPPORTS_CONVERT_TIMEZONE = True
EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
SUPPORTS_MEDIAN = True
ARRAY_SIZE_NAME = 'ARRAY_SIZE'
SUPPORTS_DECODE_CASE = True
AFTER_HAVING_MODIFIER_TRANSFORMS = {'windows': <function <lambda>>, 'qualify': <function <lambda>>}
IS_BOOL_ALLOWED = False
DIRECTED_JOINS = True
SUPPORTS_UESCAPE = False
TRY_SUPPORTED = False
TRANSFORMS = {<class 'sqlglot.expressions.query.JSONPathKey'>: <function <lambda>>, <class 'sqlglot.expressions.query.JSONPathRoot'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.query.JSONPathSubscript'>: <function <lambda>>, <class 'sqlglot.expressions.core.Adjacent'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.AllowedValuesProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.AnalyzeColumns'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.AnalyzeWith'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.array.ArrayContainsAll'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.array.ArrayOverlaps'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.constraints.AssumeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.AutoRefreshProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.BackupProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.math.Ceil'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.ClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.functions.ConnectByRoot'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.string.ConvertToCharset'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 
'sqlglot.expressions.properties.CredentialsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.functions.CurrentCatalog'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.functions.SessionUser'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ApiProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ApplicationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.CatalogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ComputeProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.DatabaseProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.DynamicProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.EmptyProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.EndStatement'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.EnviromentProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.HandlerProperty'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.properties.ParameterStyleProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.EphemeralColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.ExcludeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Except'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ExternalProperty'>: <function Generator.<lambda>>, 
<class 'sqlglot.expressions.math.Floor'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Get'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.GlobalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.HeapProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.HybridProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.IcebergProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.InheritsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.InputModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Intersect'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.datatypes.IntervalSpan'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.functions.Int64'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.json.JSONBContainsAnyTopKeys'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.json.JSONBContainsAllTopKeys'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.json.JSONBDeleteAtPath'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.json.JSONObject'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.json.JSONObjectAgg'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.LocationProperty'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.properties.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.MaskingProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.functions.NetFunc'>: <function Generator.<lambda>>, 
<class 'sqlglot.expressions.properties.NetworkProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.NonClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.NotForReplicationColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.OnProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.core.Operator'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.OutputModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.core.ExtendsLeft'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.core.ExtendsRight'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.PartitionedByBucket'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.PartitionByTruncate'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.core.PivotAny'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.array.PositionalColumn'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.ProjectionPolicyColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.InvisibleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.ZeroFillColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Put'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.RemoteWithConnectionModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ReturnsProperty'>: <function 
Generator.<lambda>>, <class 'sqlglot.expressions.properties.RowAccessProperty'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.core.SafeFunc'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SampleProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SecureProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SecurityIntegrationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SetConfigProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SharingProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SqlReadWriteProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.StabilityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Stream'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.StreamingTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.StrictProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ddl.SwapTable'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.TableColumn'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.Tags'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.array.ToMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ToTableProperty'>: <function Generator.<lambda>>, <class 
'sqlglot.expressions.properties.TransformModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.VirtualProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ddl.TriggerExecute'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Union'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.UnloggedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.UsingTemplateProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.UsingData'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.temporal.UtcDate'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.temporal.UtcTime'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.temporal.UtcTimestamp'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.query.Variadic'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.array.VarMap'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.properties.ViewAttributeProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.WithProcedureOptions'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.WithSchemaBindingProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.constraints.WithOperator'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.properties.ForceProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.core.ApproxDistinct'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.ArgMax'>: <function 
rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.ArgMin'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.array.Array'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.array.ArrayConcat'>: <function array_concat_sql.<locals>._array_concat_sql>, <class 'sqlglot.expressions.array.ArrayAppend'>: <function array_append_sql.<locals>._array_append_sql>, <class 'sqlglot.expressions.array.ArrayPrepend'>: <function array_append_sql.<locals>._array_append_sql>, <class 'sqlglot.expressions.array.ArrayContains'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.array.ArrayPosition'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.array.ArrayIntersect'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.AtTimeZone'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.core.BitwiseOr'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.BitwiseXor'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.BitwiseAnd'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.BitwiseAndAgg'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.BitwiseOrAgg'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.BitwiseXorAgg'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.BitwiseNot'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.BitwiseLeftShift'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.BitwiseRightShift'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ddl.Create'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.temporal.CurrentTimestamp'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.functions.CurrentSchemas'>: <function SnowflakeGenerator.<lambda>>, <class 
'sqlglot.expressions.temporal.Localtime'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.Localtimestamp'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.DateAdd'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.DateDiff'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.DatetimeAdd'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.DatetimeDiff'>: <function timestampdiff_sql>, <class 'sqlglot.expressions.temporal.DateStrToDate'>: <function datestrtodate_sql>, <class 'sqlglot.expressions.string.Decrypt'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.DecryptRaw'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.DayOfMonth'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.DayOfWeek'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.DayOfWeekIso'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.DayOfYear'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.DotProduct'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.array.Explode'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.Extract'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.math.CosineDistance'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.EuclideanDistance'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.properties.FileFormatProperty'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.core.FromTimeZone'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.array.GenerateSeries'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.GetExtract'>: <function 
rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.GroupConcat'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.functions.If'>: <function if_sql.<locals>._if_sql>, <class 'sqlglot.expressions.json.JSONArray'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.json.JSONExtractArray'>: <function _json_extract_value_array_sql>, <class 'sqlglot.expressions.json.JSONExtractScalar'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.json.JSONKeys'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.query.JSONValueArray'>: <function _json_extract_value_array_sql>, <class 'sqlglot.expressions.string.Levenshtein'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.LogicalAnd'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.LogicalOr'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.array.Map'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.math.ManhattanDistance'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.MakeInterval'>: <function no_make_interval_sql>, <class 'sqlglot.expressions.aggregate.Max'>: <function max_or_greatest>, <class 'sqlglot.expressions.aggregate.Min'>: <function min_or_least>, <class 'sqlglot.expressions.json.ParseJSON'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.ToBinary'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.functions.ToBoolean'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.ToDouble'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.ToFile'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.json.JSONFormat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.properties.PartitionedByProperty'>: <function SnowflakeGenerator.<lambda>>, <class 
'sqlglot.expressions.aggregate.PercentileCont'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.aggregate.PercentileDisc'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.query.Pivot'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.string.RegexpExtract'>: <function _regexpextract_sql>, <class 'sqlglot.expressions.string.RegexpExtractAll'>: <function _regexpextract_sql>, <class 'sqlglot.expressions.string.RegexpILike'>: <function _regexpilike_sql>, <class 'sqlglot.expressions.query.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.string.SHA'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.SHA1Digest'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.MD5Digest'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.MD5NumberLower64'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.MD5NumberUpper64'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.Hex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.LowerHex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.Skewness'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.array.StarMap'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.StartsWith'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.EndsWith'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.functions.Rand'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.StrPosition'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.StrToDate'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.array.StringToArray'>: <function rename_func.<locals>.<lambda>>, <class 
'sqlglot.expressions.array.StrtokToArray'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.Stuff'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.array.StPoint'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.TimeAdd'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.TimeSlice'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.Timestamp'>: <function no_timestamp_sql>, <class 'sqlglot.expressions.temporal.TimestampAdd'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.TimestampDiff'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.TimestampTrunc'>: <function timestamptrunc_sql.<locals>._timestamptrunc_sql>, <class 'sqlglot.expressions.temporal.TimeStrToTime'>: <function timestrtotime_sql>, <class 'sqlglot.expressions.temporal.TimeToUnix'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.array.ToArray'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.ToChar'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.TsOrDsAdd'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.TsOrDsDiff'>: <function date_delta_sql.<locals>._delta_sql>, <class 'sqlglot.expressions.temporal.TsOrDsToDate'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.temporal.TsOrDsToTime'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.Unhex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.UnixToTime'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.functions.Uuid'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.Booland'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.math.Boolor'>: <function rename_func.<locals>.<lambda>>, 
<class 'sqlglot.expressions.temporal.WeekOfYear'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.YearOfWeek'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.temporal.YearOfWeekIso'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.core.Xor'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.string.ByteLength'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.array.Flatten'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.aggregate.ArrayConcatAgg'>: <function SnowflakeGenerator.<lambda>>, <class 'sqlglot.expressions.string.SHA2Digest'>: <function SnowflakeGenerator.<lambda>>}
def sortarray_sql(self, expression: exp.SortArray) -> str:
    """Render a SortArray node as Snowflake's ARRAY_SORT(arr[, asc[, nulls_first]])."""
    ascending = expression.args.get("asc")
    nulls_leading = expression.args.get("nulls_first")

    # For a descending sort with NULLs first, drop the explicit nulls_first
    # argument (presumably Snowflake's default ordering in that case —
    # this mirrors the original generator's behavior).
    if ascending == exp.false() and nulls_leading == exp.true():
        nulls_leading = None

    return self.func("ARRAY_SORT", expression.this, ascending, nulls_leading)
def nthvalue_sql(self, expression: exp.NthValue) -> str:
    """Render an NthValue node as NTH_VALUE(expr, offset), optionally
    followed by a FROM FIRST / FROM LAST modifier when the node carries
    a ``from_first`` argument."""
    sql = self.func("NTH_VALUE", expression.this, expression.args.get("offset"))

    from_first = expression.args.get("from_first")
    if from_first is None:
        # No modifier requested at all — leave the bare function call.
        return sql

    return f"{sql} FROM FIRST" if from_first else f"{sql} FROM LAST"
TYPE_MAPPING = {<DType.DATETIME2: 'DATETIME2'>: 'TIMESTAMP', <DType.NCHAR: 'NCHAR'>: 'CHAR', <DType.NVARCHAR: 'NVARCHAR'>: 'VARCHAR', <DType.MEDIUMTEXT: 'MEDIUMTEXT'>: 'TEXT', <DType.LONGTEXT: 'LONGTEXT'>: 'TEXT', <DType.TINYTEXT: 'TINYTEXT'>: 'TEXT', <DType.BLOB: 'BLOB'>: 'VARBINARY', <DType.MEDIUMBLOB: 'MEDIUMBLOB'>: 'BLOB', <DType.LONGBLOB: 'LONGBLOB'>: 'BLOB', <DType.TINYBLOB: 'TINYBLOB'>: 'BLOB', <DType.INET: 'INET'>: 'INET', <DType.ROWVERSION: 'ROWVERSION'>: 'VARBINARY', <DType.SMALLDATETIME: 'SMALLDATETIME'>: 'TIMESTAMP', <DType.BIGDECIMAL: 'BIGDECIMAL'>: 'DOUBLE', <DType.JSON: 'JSON'>: 'VARIANT', <DType.NESTED: 'NESTED'>: 'OBJECT', <DType.STRUCT: 'STRUCT'>: 'OBJECT', <DType.TEXT: 'TEXT'>: 'VARCHAR'}
TOKEN_MAPPING = {<TokenType.AUTO_INCREMENT: 226>: 'AUTOINCREMENT'}
PROPERTIES_LOCATION = {<class 'sqlglot.expressions.properties.AllowedValuesProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.AlgorithmProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.ApiProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.ApplicationProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.AutoIncrementProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.AutoRefreshProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.BackupProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.BlockCompressionProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.CatalogProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.CharacterSetProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ChecksumProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.CollateProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ComputeProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.CopyGrantsProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.query.Cluster'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ClusteredByProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.DistributedByProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.DuplicateKeyProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 
'sqlglot.expressions.properties.DataBlocksizeProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.DatabaseProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.DataDeletionProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.DefinerProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.DictRange'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.DictProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.DynamicProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.DistKeyProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.DistStyleProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.EmptyProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.EncodeProperty'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.properties.EngineProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.EnviromentProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.HandlerProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ParameterStyleProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ExecuteAsProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ExternalProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.FallbackProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.FileFormatProperty'>: 
<PropertiesLocation.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.properties.FreespaceProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.GlobalProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.HeapProperty'>: <PropertiesLocation.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.properties.HybridProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.InheritsProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.IcebergProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.IncludeProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.InputModelProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.IsolatedLoadingProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.JournalProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.LanguageProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.LikeProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.LocationProperty'>: <PropertiesLocation.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.properties.LockProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.LockingProperty'>: <PropertiesLocation.POST_ALIAS: 'POST_ALIAS'>, <class 'sqlglot.expressions.properties.LogProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.MaskingProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.MaterializedProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 
'sqlglot.expressions.properties.MergeBlockRatioProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.ModuleProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.NetworkProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.NoPrimaryIndexProperty'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.properties.OnProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.OnCommitProperty'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.query.Order'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.OutputModelProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.PartitionedByProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.PartitionedOfProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.constraints.PrimaryKey'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.Property'>: <PropertiesLocation.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.properties.RefreshTriggerProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.RemoteWithConnectionModelProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ReturnsProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.RollupProperty'>: <PropertiesLocation.UNSUPPORTED: 'UNSUPPORTED'>, <class 'sqlglot.expressions.properties.RowAccessProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.RowFormatProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 
'sqlglot.expressions.properties.RowFormatDelimitedProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.RowFormatSerdeProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SampleProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SchemaCommentProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SecureProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.SecurityIntegrationProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.SerdeProperties'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ddl.Set'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SettingsProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SetProperty'>: <PropertiesLocation.UNSUPPORTED: 'UNSUPPORTED'>, <class 'sqlglot.expressions.properties.SetConfigProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SharingProperty'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.ddl.SequenceProperties'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.ddl.TriggerProperties'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.properties.SortKeyProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SqlReadWriteProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.SqlSecurityProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.StabilityProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 
'sqlglot.expressions.properties.StorageHandlerProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.StreamingTableProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.StrictProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.Tags'>: <PropertiesLocation.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.properties.TemporaryProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.ToTableProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.TransientProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.TransformModelProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ddl.MergeTreeTTL'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.UnloggedProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.UsingProperty'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.properties.UsingTemplateProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ViewAttributeProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.VirtualProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.VolatileProperty'>: <PropertiesLocation.UNSUPPORTED: 'UNSUPPORTED'>, <class 'sqlglot.expressions.properties.WithDataProperty'>: <PropertiesLocation.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.properties.WithJournalTableProperty'>: <PropertiesLocation.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.properties.WithProcedureOptions'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 
'sqlglot.expressions.properties.WithSchemaBindingProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.WithSystemVersioningProperty'>: <PropertiesLocation.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.properties.ForceProperty'>: <PropertiesLocation.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.properties.CredentialsProperty'>: <PropertiesLocation.POST_WITH: 'POST_WITH'>}
RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS = (<class 'sqlglot.expressions.aggregate.ArrayAgg'>,)
def with_properties(self, properties: exp.Properties) -> str:
    """Render table properties unwrapped, space-separated, after a leading separator."""
    prefix = self.sep("")
    return self.properties(properties, wrapped=False, prefix=prefix, sep=" ")
def values_sql(self, expression: exp.Values, values_as_table: bool = True) -> str:
    """Generate a VALUES clause, falling back to the non-table form when it
    contains expressions Snowflake cannot render inside VALUES (...)."""
    has_unsupported = bool(expression.find(*self.UNSUPPORTED_VALUES_EXPRESSIONS))
    return super().values_sql(
        expression, values_as_table=values_as_table and not has_unsupported
    )
def datatype_sql(self, expression: exp.DataType) -> str:
    """Generate a data type, handling Snowflake's VECTOR and OBJECT quirks."""
    # VECTOR only accepts FLOAT (not DOUBLE), INT and STRING element types,
    # so a DOUBLE nested directly inside a VECTOR must stay as FLOAT instead
    # of being mapped to its synonym DOUBLE.
    # https://docs.snowflake.com/en/sql-reference/data-types-vector
    parent = expression.parent
    if (
        expression.is_type(exp.DType.DOUBLE)
        and isinstance(parent, exp.DataType)
        and parent.is_type(exp.DType.VECTOR)
    ):
        return "FLOAT"

    fields = expression.expressions
    if fields and expression.is_type(*exp.DataType.STRUCT_TYPES):
        for field in fields:
            # The correct syntax is OBJECT [ (<key> <value_type [NOT NULL] [, ...]) ];
            # a bare nested DataType field can't be expressed, so emit plain OBJECT.
            if isinstance(field, exp.DataType):
                return "OBJECT"
            if isinstance(field, exp.ColumnDef) and field.this and field.this.is_string:
                # Doing OBJECT('foo' VARCHAR) is invalid snowflake Syntax. Moreover, besides
                # converting 'foo' into an identifier, we also need to quote it because these
                # keys are case-sensitive. For example:
                #
                # WITH t AS (SELECT OBJECT_CONSTRUCT('x', 'y') AS c) SELECT c:x FROM t -- correct
                # WITH t AS (SELECT OBJECT_CONSTRUCT('x', 'y') AS c) SELECT c:X FROM t -- incorrect, returns NULL
                field.this.replace(exp.to_identifier(field.name, quoted=True))

    return super().datatype_sql(expression)
def tonumber_sql(self, expression: exp.ToNumber) -> str:
    """Generate [TRY_]TO_NUMBER, omitting Snowflake's default precision (38)
    and scale (0) arguments when they carry no information."""
    precision = expression.args.get("precision")
    scale = expression.args.get("scale")

    is_default_precision = isinstance(precision, exp.Literal) and precision.name == "38"
    is_default_scale = isinstance(scale, exp.Literal) and scale.name == "0"

    # Precision can only be dropped together with the scale, since the
    # arguments are positional.
    if is_default_scale:
        scale = None
        if is_default_precision:
            precision = None

    name = "TRY_TO_NUMBER" if expression.args.get("safe") else "TO_NUMBER"
    return self.func(
        name,
        expression.this,
        expression.args.get("format"),
        precision,
        scale,
    )
def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
    """TIMESTAMP_FROM_PARTS has no milliseconds argument in Snowflake, so
    fold any milli component into the nanoseconds argument."""
    milli = expression.args.get("milli")
    if milli is not None:
        expression.set("nano", milli.pop() * exp.Literal.number(1000000))
    return rename_func("TIMESTAMP_FROM_PARTS")(self, expression)
def cast_sql(self, expression: exp.Cast, safe_prefix: str | None = None) -> str:
    """Geospatial casts must use Snowflake's TO_GEOGRAPHY / TO_GEOMETRY functions."""
    for dtype, func_name in (
        (exp.DType.GEOGRAPHY, "TO_GEOGRAPHY"),
        (exp.DType.GEOMETRY, "TO_GEOMETRY"),
    ):
        if expression.is_type(dtype):
            return self.func(func_name, expression.this)

    return super().cast_sql(expression, safe_prefix=safe_prefix)
def trycast_sql(self, expression: exp.TryCast) -> str:
    """Generate TRY_CAST only when its value is (or can be shown to be) a
    string, since Snowflake requires that; otherwise fall back to CAST."""
    value = expression.this

    if value.type is None:
        from sqlglot.optimizer.annotate_types import annotate_types

        value = annotate_types(value, dialect=self.dialect)

    # "requires_string" is only ever set by Snowflake itself, so its presence
    # means we're roundtripping and TRY_CAST is known to be valid.
    is_string = value.is_type(*exp.DataType.TEXT_TYPES)
    if is_string or expression.args.get("requires_string"):
        return super().trycast_sql(expression)

    return self.cast_sql(expression)
def log_sql(self, expression: exp.Log) -> str:
    """Single-argument LOG is a natural logarithm, which Snowflake spells LN."""
    if expression.expression:
        return super().log_sql(expression)
    return self.func("LN", expression.this)
def greatest_sql(self, expression: exp.Greatest) -> str:
    """Use GREATEST_IGNORE_NULLS when NULL operands should be skipped."""
    if expression.args.get("ignore_nulls"):
        return self.func("GREATEST_IGNORE_NULLS", expression.this, *expression.expressions)
    return self.func("GREATEST", expression.this, *expression.expressions)
def least_sql(self, expression: exp.Least) -> str:
    """Use LEAST_IGNORE_NULLS when NULL operands should be skipped."""
    if expression.args.get("ignore_nulls"):
        return self.func("LEAST_IGNORE_NULLS", expression.this, *expression.expressions)
    return self.func("LEAST", expression.this, *expression.expressions)
def generator_sql(self, expression: exp.Generator) -> str:
    """Render GENERATOR(ROWCOUNT => ..., TIMELIMIT => ...), keeping only
    the keyword arguments that are actually present."""
    kwargs = []
    for keyword in ("ROWCOUNT", "TIMELIMIT"):
        value = expression.args.get(keyword.lower())
        if value:
            kwargs.append(exp.Kwarg(this=exp.var(keyword), expression=value))

    return self.func("GENERATOR", *kwargs)
def unnest_sql(self, expression: exp.Unnest) -> str:
    """Transpile UNNEST into Snowflake's [TABLE(]FLATTEN(INPUT => ...)[)]
    construct, mapping any user-provided alias columns onto FLATTEN's
    fixed output columns (SEQ, KEY, PATH, INDEX, VALUE, THIS)."""
    alias_node = expression.args.get("alias")
    offset = expression.args.get("offset")

    existing_columns = alias_node.columns if alias_node else []
    value_column = seq_get(existing_columns, 0) or exp.to_identifier("value")

    flatten_columns = [
        exp.to_identifier("seq"),
        exp.to_identifier("key"),
        exp.to_identifier("path"),
        offset.pop() if isinstance(offset, exp.Expr) else exp.to_identifier("index"),
        value_column,
        exp.to_identifier("this"),
    ]

    if alias_node:
        alias_node.set("columns", flatten_columns)
    else:
        alias_node = exp.TableAlias(this="_u", columns=flatten_columns)

    flatten_input = self.sql(expression.expressions[0])
    if not flatten_input.startswith("INPUT =>"):
        flatten_input = f"INPUT => {flatten_input}"

    parent = expression.parent
    if isinstance(parent, exp.Lateral):
        explode = f"FLATTEN({flatten_input})"
    else:
        explode = f"TABLE(FLATTEN({flatten_input}))"

    alias_sql = self.sql(alias_node)
    alias_sql = f" AS {alias_sql}" if alias_sql else ""

    # Outside FROM/JOIN/LATERAL contexts the UNNEST becomes a scalar-style
    # "<value> FROM ..." fragment.
    if isinstance(parent, (exp.From, exp.Join, exp.Lateral)):
        prefix = ""
    else:
        prefix = f"{value_column} FROM "

    return f"{prefix}{explode}{alias_sql}"
def show_sql(self, expression: exp.Show) -> str:
    """Render a Snowflake SHOW command with all of its optional modifiers."""
    terse = "TERSE " if expression.args.get("terse") else ""
    iceberg = "ICEBERG " if expression.args.get("iceberg") else ""
    history = " HISTORY" if expression.args.get("history") else ""

    like = self.sql(expression, "like")
    like = f" LIKE {like}" if like else ""

    scope = self.sql(expression, "scope")
    scope = f" {scope}" if scope else ""

    scope_kind = self.sql(expression, "scope_kind")
    scope_kind = f" IN {scope_kind}" if scope_kind else ""

    starts_with = self.sql(expression, "starts_with")
    starts_with = f" STARTS WITH {starts_with}" if starts_with else ""

    limit = self.sql(expression, "limit")

    from_ = self.sql(expression, "from_")
    from_ = f" FROM {from_}" if from_ else ""

    privileges = self.expressions(expression, key="privileges", flat=True)
    privileges = f" WITH PRIVILEGES {privileges}" if privileges else ""

    return (
        f"SHOW {terse}{iceberg}{expression.name}{history}{like}"
        f"{scope_kind}{scope}{starts_with}{limit}{from_}{privileges}"
    )
def rowaccessproperty_sql(self, expression: exp.RowAccessProperty) -> str:
    """WITH ROW ACCESS POLICY <name> [ON (<cols>)]; bare "ROW ACCESS" if unnamed."""
    if not expression.this:
        return "ROW ACCESS"

    on = ""
    if expression.expressions:
        on = f" ON ({self.expressions(expression, flat=True)})"
    return f"WITH ROW ACCESS POLICY {self.sql(expression, 'this')}{on}"
def describe_sql(self, expression: exp.Describe) -> str:
    """DESCRIBE [<qualifier>] <kind> <target> [<exprs>]; kind defaults to TABLE."""
    kind_value = expression.args.get("kind") or "TABLE"

    properties = expression.args.get("properties")
    if properties:
        kind = f" {self.expressions(properties, sep=' ')} {kind_value}"
    else:
        kind = f" {kind_value}"

    target = f" {self.sql(expression, 'this')}"
    exprs = self.expressions(expression, flat=True)
    exprs = f" {exprs}" if exprs else ""
    return f"DESCRIBE{kind}{target}{exprs}"
def generatedasidentitycolumnconstraint_sql(
    self, expression: exp.GeneratedAsIdentityColumnConstraint
) -> str:
    """AUTOINCREMENT [START n] [INCREMENT n] [ORDER|NOORDER]."""
    parts = ["AUTOINCREMENT"]

    start = expression.args.get("start")
    if start:
        parts.append(f" START {start}")

    increment = expression.args.get("increment")
    if increment:
        parts.append(f" INCREMENT {increment}")

    # A missing "order" arg means neither ORDER nor NOORDER is emitted.
    order = expression.args.get("order")
    if order is not None:
        parts.append(" ORDER" if order else " NOORDER")

    return "".join(parts)
def cluster_sql(self, expression: exp.Cluster) -> str:
    """Snowflake clustering keys are always parenthesized: CLUSTER BY (<exprs>)."""
    keys = self.expressions(expression, flat=True)
    return f"CLUSTER BY ({keys})"
def struct_sql(self, expression: exp.Struct) -> str:
    """Generate a struct as OBJECT_CONSTRUCT('k1', v1, 'k2', v2, ...),
    preserving Snowflake's wildcard object syntax for the star case."""
    args = expression.expressions

    if len(args) == 1:
        only = args[0]
        # Wildcard syntax: https://docs.snowflake.com/en/sql-reference/data-types-semistructured#object
        if only.is_star or (isinstance(only, exp.ILike) and only.left.is_star):
            return f"{{{self.sql(only)}}}"

    construct_args = []
    for i, e in enumerate(args):
        if isinstance(e, exp.PropertyEQ):
            key = exp.Literal.string(e.name) if isinstance(e.this, exp.Identifier) else e.this
            construct_args.extend((key, e.expression))
        else:
            # Unnamed fields get positional keys: _0, _1, ...
            construct_args.extend((exp.Literal.string(f"_{i}"), e))

    return self.func("OBJECT_CONSTRUCT", *construct_args)
@unsupported_args("weight", "accuracy")
def approxquantile_sql(self, expression: exp.ApproxQuantile) -> str:
    """Map APPROX_QUANTILE to Snowflake's APPROX_PERCENTILE; the weight and
    accuracy arguments have no Snowflake equivalent and are flagged."""
    quantile = expression.args.get("quantile")
    return self.func("APPROX_PERCENTILE", expression.this, quantile)
def alterset_sql(self, expression: exp.AlterSet) -> str:
    """ALTER ... SET <props> [STAGE_FILE_FORMAT = (...)] [STAGE_COPY_OPTIONS = (...)] [TAG ...]."""
    parts = ["SET"]

    exprs = self.expressions(expression, flat=True)
    if exprs:
        parts.append(f" {exprs}")

    file_format = self.expressions(expression, key="file_format", flat=True, sep=" ")
    if file_format:
        parts.append(f" STAGE_FILE_FORMAT = ({file_format})")

    copy_options = self.expressions(expression, key="copy_options", flat=True, sep=" ")
    if copy_options:
        parts.append(f" STAGE_COPY_OPTIONS = ({copy_options})")

    tag = self.expressions(expression, key="tag", flat=True)
    if tag:
        parts.append(f" TAG {tag}")

    return "".join(parts)
def strtotime_sql(self, expression: exp.StrToTime) -> str:
    """Generate [TRY_]TO_TIMESTAMP / TO_DATE / etc. based on the target type.

    The target type is taken from the explicit "target_type" arg (stored as a
    DataType instance), falling back to the expression's type annotation, and
    defaulting to TIMESTAMP. The "safe" arg selects the TRY_-prefixed variant.
    """
    # target_type is stored as a DataType instance
    target_type = expression.args.get("target_type")

    if isinstance(target_type, exp.DataType):
        type_enum = target_type.this
    elif expression.type:
        type_enum = expression.type.this
    else:
        type_enum = exp.DType.TIMESTAMP

    func_name = TIMESTAMP_TYPES.get(type_enum, "TO_TIMESTAMP")
    if expression.args.get("safe"):
        func_name = f"TRY_{func_name}"

    return self.func(func_name, expression.this, self.format_time(expression))
def timestampsub_sql(self, expression: exp.TimestampSub) -> str:
    """Snowflake has no TIMESTAMP_SUB; negate the delta and emit TIMESTAMPADD."""
    return self.sql(
        exp.TimestampAdd(
            this=expression.this,
            expression=expression.expression * -1,
            unit=expression.unit,
        )
    )
def jsonextract_sql(self, expression: exp.JSONExtract) -> str:
    """Generate GET_PATH(<json>, <path>).

    JSON strings are valid coming from other dialects such as BQ, so unless
    the value is already JSON (ParseJSON / nested JSONExtract) or the
    "requires_json" flag marks it as such, we PARSE_JSON it preemptively.
    """
    this = expression.this

    if not isinstance(this, (exp.ParseJSON, exp.JSONExtract)) and not expression.args.get(
        "requires_json"
    ):
        this = exp.ParseJSON(this=this)

    return self.func("GET_PATH", this, expression.expression)
def timetostr_sql(self, expression: exp.TimeToStr) -> str:
    """TO_CHAR needs a datetime value, so string inputs are cast to TIMESTAMP first."""
    value = expression.this
    if value.is_string:
        value = exp.cast(value, exp.DType.TIMESTAMP)

    return self.func("TO_CHAR", value, self.format_time(expression))
def datesub_sql(self, expression: exp.DateSub) -> str:
    """Transpile DATE_SUB into DATEADD with a negated count."""
    count = expression.expression
    if count:
        count.replace(count * (-1))
    else:
        self.unsupported("DateSub cannot be transpiled if the subtracted count is unknown")

    return date_delta_sql("DATEADD")(self, expression)
def select_sql(self, expression: exp.Select) -> str:
    """Snowflake requires a LIMIT when OFFSET is present; inject LIMIT NULL."""
    if expression.args.get("offset") and not expression.args.get("limit"):
        expression.limit(exp.Null(), copy=False)
    return super().select_sql(expression)
def createable_sql(self, expression: exp.Create, locations: defaultdict) -> str:
    """Generate the createable part of a CREATE statement, relocating
    COPY GRANTS for materialized views.

    For materialized views COPY GRANTS is located *before* the columns list,
    in contrast to normal views where it comes *after*. CopyGrantsProperty
    defaults to POST_SCHEMA, so it's pulled out and emitted POST_NAME here.
    ref: https://docs.snowflake.com/en/sql-reference/sql/create-materialized-view#syntax
    ref: https://docs.snowflake.com/en/sql-reference/sql/create-view#syntax
    """
    is_materialized = expression.find(exp.MaterializedProperty)
    copy_grants = expression.find(exp.CopyGrantsProperty)

    if expression.kind == "VIEW" and is_materialized and copy_grants:
        locations[exp.Properties.Location.POST_SCHEMA].remove(copy_grants)

        name_sql = self.sql(expression.this, "this")
        grants_sql = self.sql(copy_grants)
        schema_sql = self.schema_columns_sql(expression.this)
        schema_sql = f"{self.sep()}{schema_sql}" if schema_sql else ""

        return f"{name_sql}{self.sep()}{grants_sql}{schema_sql}"

    return super().createable_sql(expression, locations)
def arrayagg_sql(self, expression: exp.ArrayAgg) -> str:
    """Generate ARRAY_AGG, hoisting any ORDER BY into a WITHIN GROUP clause."""
    this = expression.this
    order = this if isinstance(this, exp.Order) else None

    if order is not None:
        expression.set("this", order.this.pop())

    sql = super().arrayagg_sql(expression)

    if order is not None:
        sql = self.sql(exp.WithinGroup(this=sql, expression=order))

    return sql
def arraydistinct_sql(self, expression: exp.ArrayDistinct) -> str:
    """ARRAY_DISTINCT, compacting NULLs away first unless check_null is set."""
    target = expression.this
    if not expression.args.get("check_null"):
        target = exp.ArrayCompact(this=target)
    return self.func("ARRAY_DISTINCT", target)
def arraytostring_sql(self, expression: exp.ArrayToString) -> str:
    """Render as ARRAY_TO_STRING(<array>, <separator>)."""
    array, separator = expression.this, expression.expression
    return self.func("ARRAY_TO_STRING", array, separator)
def array_sql(self, expression: exp.Array) -> str:
    """Generate an array literal, rewriting BigQuery-style ARRAY(SELECT AS
    STRUCT ...) into ARRAY_AGG(OBJECT_CONSTRUCT(...)) subqueries."""
    first = seq_get(expression.expressions, 0)

    # SELECT AS STRUCT foo AS alias_foo -> ARRAY_AGG(OBJECT_CONSTRUCT('alias_foo', foo))
    if isinstance(first, exp.Select) and first.text("kind").upper() == "STRUCT":
        construct_args = []
        for projection in first.expressions:
            # Alias case: SELECT AS STRUCT foo AS alias_foo -> OBJECT_CONSTRUCT('alias_foo', foo)
            # Column case: SELECT AS STRUCT foo -> OBJECT_CONSTRUCT('foo', foo)
            value = projection.this if isinstance(projection, exp.Alias) else projection
            construct_args.append(exp.Literal.string(projection.alias_or_name))
            construct_args.append(value)

        array_agg = exp.ArrayAgg(this=build_object_construct(args=construct_args))

        first.set("kind", None)
        first.set("expressions", [array_agg])

        return self.sql(first.subquery())

    return inline_array_sql(self, expression)
def currentdate_sql(self, expression: exp.CurrentDate) -> str:
    """CURRENT_DATE with a timezone argument becomes
    CAST(CONVERT_TIMEZONE(<tz>, CURRENT_TIMESTAMP()) AS DATE)."""
    zone = self.sql(expression, "this")
    if not zone:
        return super().currentdate_sql(expression)

    converted = exp.ConvertTimezone(target_tz=zone, timestamp=exp.CurrentTimestamp())
    return self.sql(exp.Cast(this=converted, to=exp.DataType(this=exp.DType.DATE)))
def dot_sql(self, expression: exp.Dot) -> str:
    """Use Snowflake colon notation for member access on a top-level STRUCT."""
    this = expression.this

    if not this.type:
        from sqlglot.optimizer.annotate_types import annotate_types

        this = annotate_types(this, dialect=self.dialect)

    # Only the top-level STRUCT uses colon notation; nested Dots fall through.
    if this.is_type(exp.DType.STRUCT) and not isinstance(this, exp.Dot):
        return f"{self.sql(this)}:{self.sql(expression, 'expression')}"

    return super().dot_sql(expression)
def modelattribute_sql(self, expression: exp.ModelAttribute) -> str:
    """Render model attribute access as <model>!<attribute>."""
    model = self.sql(expression, "this")
    attribute = self.sql(expression, "expression")
    return f"{model}!{attribute}"
def format_sql(self, expression: exp.Format) -> str:
    """A plain FORMAT('%s', x) is just TO_CHAR(x); anything else falls back."""
    if expression.name.lower() == "%s" and len(expression.expressions) == 1:
        return self.func("TO_CHAR", expression.expressions[0])

    return self.function_fallback_sql(expression)
def splitpart_sql(self, expression: exp.SplitPart) -> str:
    """SPLIT_PART with Snowflake's required arguments defaulted when missing."""
    # Default delimiter is a single space
    if not expression.args.get("delimiter"):
        expression.set("delimiter", exp.Literal.string(" "))

    # Default part_index is 1
    if not expression.args.get("part_index"):
        expression.set("part_index", exp.Literal.number(1))

    return rename_func("SPLIT_PART")(self, expression)
def uniform_sql(self, expression: exp.Uniform) -> str:
    """UNIFORM(min, max, gen), synthesizing the generator argument when
    transpiling Databricks' 2- or 3-argument forms."""
    seed = expression.args.get("seed")

    if seed:
        # From Databricks UNIFORM(min, max, seed) -> wrap the seed in RANDOM(seed)
        gen = exp.Rand(this=seed)
    else:
        # No gen argument (Databricks 2-arg UNIFORM(min, max)) -> add RANDOM()
        gen = expression.args.get("gen") or exp.Rand()

    return self.func("UNIFORM", expression.this, expression.expression, gen)
def window_sql(self, expression: exp.Window) -> str:
    """Drop the default ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED
    FOLLOWING frame from ranking window functions (possibly wrapped in
    RESPECT/IGNORE NULLS), since it's implied."""
    spec = expression.args.get("spec")
    func = expression.this

    is_ranking = isinstance(func, RANKING_WINDOW_FUNCTIONS_WITH_FRAME) or (
        isinstance(func, (exp.RespectNulls, exp.IgnoreNulls))
        and isinstance(func.this, RANKING_WINDOW_FUNCTIONS_WITH_FRAME)
    )

    if is_ranking and spec:
        is_default_frame = (
            spec.text("kind").upper() == "ROWS"
            and spec.text("start").upper() == "UNBOUNDED"
            and spec.text("start_side").upper() == "PRECEDING"
            and spec.text("end").upper() == "UNBOUNDED"
            and spec.text("end_side").upper() == "FOLLOWING"
        )
        if is_default_frame:
            expression.set("spec", None)

    return super().window_sql(expression)
Inherited Members
sqlglot.generator.Generator
Generator
NULL_ORDERING_SUPPORTED
WINDOW_FUNCS_WITH_NULL_ORDERING
IGNORE_NULLS_IN_FUNC
IGNORE_NULLS_BEFORE_ORDER
LOCKING_READS_SUPPORTED
WRAP_DERIVED_VALUES
CREATE_FUNCTION_RETURN_AS
SUPPORTS_MERGE_WHERE
INTERVAL_ALLOWS_PLURAL_FORM
LIMIT_FETCH
RENAME_TABLE_WITH_DB
GROUPINGS_SEP
INDEX_ON
INOUT_SEPARATOR
QUERY_HINT_SEP
DUPLICATE_KEY_UPDATE_WITH_SET
LIMIT_IS_TOP
RETURNING_END
EXTRACT_ALLOWS_QUOTES
TZ_TO_WITH_TIME_ZONE
NVL2_SUPPORTED
VALUES_AS_TABLE
ALTER_TABLE_INCLUDE_COLUMN_KEYWORD
UNNEST_WITH_ORDINALITY
SEMI_ANTI_JOIN_WITH_SIDE
COMPUTED_COLUMN_WITH_TYPE
TABLESAMPLE_REQUIRES_PARENS
TABLESAMPLE_SIZE_IS_ROWS
TABLESAMPLE_KEYWORDS
TABLESAMPLE_WITH_METHOD
TABLESAMPLE_SEED_KEYWORD
DATA_TYPE_SPECIFIERS_ALLOWED
ENSURE_BOOLS
CTE_RECURSIVE_KEYWORD_REQUIRED
SUPPORTS_SINGLE_ARG_CONCAT
LAST_DAY_SUPPORTS_DATE_PART
SUPPORTS_TABLE_ALIAS_COLUMNS
SUPPORTS_NAMED_CTE_COLUMNS
UNPIVOT_ALIASES_ARE_IDENTIFIERS
SUPPORTS_SELECT_INTO
SUPPORTS_UNLOGGED_TABLES
SUPPORTS_CREATE_TABLE_LIKE
SUPPORTS_MODIFY_COLUMN
SUPPORTS_CHANGE_COLUMN
LIKE_PROPERTY_INSIDE_SCHEMA
MULTI_ARG_DISTINCT
JSON_TYPE_REQUIRED_FOR_EXTRACTION
JSON_PATH_BRACKETED_KEY_SUPPORTED
JSON_PATH_SINGLE_QUOTE_ESCAPE
CAN_IMPLEMENT_ARRAY_ANY
SUPPORTS_TO_NUMBER
SUPPORTS_WINDOW_EXCLUDE
SET_OP_MODIFIERS
COPY_HAS_INTO_KEYWORD
UNICODE_SUBSTITUTE
HEX_FUNC
WITH_PROPERTIES_PREFIX
QUOTE_JSON_PATH
PAD_FILL_PATTERN_IS_REQUIRED
SUPPORTS_UNIX_SECONDS
ALTER_SET_WRAPPED
NORMALIZE_EXTRACT_DATE_PARTS
PARSE_JSON_NAME
ALTER_SET_TYPE
ARRAY_SIZE_DIM_REQUIRED
SUPPORTS_BETWEEN_FLAGS
SUPPORTS_LIKE_QUANTIFIERS
MATCH_AGAINST_TABLE_PREFIX
SET_ASSIGNMENT_REQUIRES_VARIABLE_KEYWORD
DECLARE_DEFAULT_ASSIGNMENT
UPDATE_STATEMENT_SUPPORTS_FROM
STAR_EXCLUDE_REQUIRES_DERIVED_TABLE
SUPPORTS_DROP_ALTER_ICEBERG_PROPERTY
UNSUPPORTED_TYPES
TYPE_PARAM_SETTINGS
TIME_PART_SINGULARS
NAMED_PLACEHOLDER_TOKEN
EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
RESERVED_KEYWORDS
WITH_SEPARATED_COMMENTS
EXCLUDE_COMMENTS
UNWRAPPED_INTERVAL_VALUES
PARAMETERIZABLE_TEXT_TYPES
EXPRESSIONS_WITHOUT_NESTED_CTES
SAFE_JSON_PATH_KEY_RE
SENTINEL_LINE_BREAK
pretty
identify
normalize
pad
unsupported_level
max_unsupported
leading_comma
max_text_width
comments
dialect
normalize_functions
unsupported_messages
generate
preprocess
unsupported
sep
seg
sanitize_comment
maybe_comment
wrap
no_identify
normalize_func
indent
sql
uncache_sql
cache_sql
characterset_sql
column_parts
column_sql
pseudocolumn_sql
columnposition_sql
columndef_sql
columnconstraint_sql
computedcolumnconstraint_sql
autoincrementcolumnconstraint_sql
compresscolumnconstraint_sql
generatedasrowcolumnconstraint_sql
periodforsystemtimeconstraint_sql
notnullcolumnconstraint_sql
primarykeycolumnconstraint_sql
uniquecolumnconstraint_sql
inoutcolumnconstraint_sql
create_sql
sequenceproperties_sql
triggerproperties_sql
triggerreferencing_sql
triggerevent_sql
clone_sql
heredoc_sql
prepend_ctes
with_sql
cte_sql
tablealias_sql
bitstring_sql
hexstring_sql
bytestring_sql
unicodestring_sql
rawstring_sql
datatypeparam_sql
datatype_param_bound_limiter
directory_sql
delete_sql
drop_sql
set_operation
set_operations
fetch_sql
limitoptions_sql
filter_sql
hint_sql
indexparameters_sql
index_sql
identifier_sql
hex_sql
lowerhex_sql
inputoutputformat_sql
national_sql
partition_sql
properties_sql
root_properties
properties
locate_properties
property_name
property_sql
uuidproperty_sql
likeproperty_sql
fallbackproperty_sql
journalproperty_sql
freespaceproperty_sql
checksumproperty_sql
mergeblockratioproperty_sql
moduleproperty_sql
datablocksizeproperty_sql
blockcompressionproperty_sql
isolatedloadingproperty_sql
partitionboundspec_sql
partitionedofproperty_sql
lockingproperty_sql
withdataproperty_sql
withsystemversioningproperty_sql
insert_sql
introducer_sql
kill_sql
pseudotype_sql
objectidentifier_sql
onconflict_sql
returning_sql
rowformatdelimitedproperty_sql
withtablehint_sql
indextablehint_sql
historicaldata_sql
table_parts
table_sql
tablefromrows_sql
tablesample_sql
pivot_sql
version_sql
tuple_sql
update_sql
var_sql
into_sql
from_sql
groupingsets_sql
rollup_sql
rollupindex_sql
rollupproperty_sql
cube_sql
group_sql
having_sql
connect_sql
prior_sql
join_sql
lambda_sql
lateral_op
lateral_sql
limit_sql
offset_sql
setitem_sql
set_sql
queryband_sql
pragma_sql
lock_sql
literal_sql
escape_str
loaddata_sql
null_sql
boolean_sql
booland_sql
boolor_sql
order_sql
withfill_sql
distribute_sql
sort_sql
ordered_sql
matchrecognizemeasure_sql
matchrecognize_sql
query_modifiers
options_modifier
for_modifiers
queryoption_sql
offset_limit_modifiers
after_limit_modifiers
schema_sql
schema_columns_sql
star_sql
parameter_sql
sessionparameter_sql
placeholder_sql
subquery_sql
qualify_sql
prewhere_sql
where_sql
partition_by_sql
windowspec_sql
withingroup_sql
between_sql
bracket_offset_expressions
bracket_sql
all_sql
any_sql
exists_sql
case_sql
constraint_sql
nextvaluefor_sql
extract_sql
trim_sql
convert_concat_args
concat_sql
concatws_sql
check_sql
foreignkey_sql
primarykey_sql
if_sql
matchagainst_sql
jsonkeyvalue_sql
jsonpath_sql
json_path_part
formatjson_sql
formatphrase_sql
jsonarray_sql
jsonarrayagg_sql
jsoncolumndef_sql
jsonschema_sql
jsontable_sql
openjsoncolumndef_sql
openjson_sql
in_sql
in_unnest_op
interval_sql
return_sql
reference_sql
anonymous_sql
paren_sql
neg_sql
not_sql
alias_sql
pivotalias_sql
aliases_sql
atindex_sql
attimezone_sql
fromtimezone_sql
add_sql
and_sql
or_sql
xor_sql
connector_sql
bitwiseand_sql
bitwiseleftshift_sql
bitwisenot_sql
bitwiseor_sql
bitwiserightshift_sql
bitwisexor_sql
collate_sql
command_sql
comment_sql
mergetreettlaction_sql
mergetreettl_sql
transaction_sql
commit_sql
rollback_sql
altercolumn_sql
modifycolumn_sql
alterindex_sql
alterdiststyle_sql
altersortkey_sql
alterrename_sql
renamecolumn_sql
alter_sql
altersession_sql
add_column_sql
droppartition_sql
dropprimarykey_sql
addconstraint_sql
addpartition_sql
distinct_sql
ignorenulls_sql
respectnulls_sql
havingmax_sql
intdiv_sql
dpipe_sql
div_sql
safedivide_sql
overlaps_sql
distance_sql
eq_sql
propertyeq_sql
escape_sql
glob_sql
gt_sql
gte_sql
is_sql
like_sql
ilike_sql
match_sql
similarto_sql
lt_sql
lte_sql
mod_sql
mul_sql
neq_sql
nullsafeeq_sql
nullsafeneq_sql
sub_sql
jsoncast_sql
try_sql
use_sql
binary
ceil_floor
function_fallback_sql
func
format_args
too_wide
format_time
expressions
op_expressions
naked_property
tag_sql
token_sql
userdefinedfunction_sql
joinhint_sql
kwarg_sql
when_sql
whens_sql
merge_sql
tochar_sql
dictproperty_sql
dictrange_sql
dictsubproperty_sql
duplicatekeyproperty_sql
uniquekeyproperty_sql
distributedbyproperty_sql
oncluster_sql
clusteredbyproperty_sql
anyvalue_sql
querytransform_sql
indexconstraintoption_sql
checkcolumnconstraint_sql
indexcolumnconstraint_sql
nvl2_sql
comprehension_sql
columnprefix_sql
opclass_sql
predict_sql
generateembedding_sql
generatetext_sql
generatetable_sql
generatebool_sql
generateint_sql
generatedouble_sql
mltranslate_sql
mlforecast_sql
aiforecast_sql
featuresattime_sql
vectorsearch_sql
forin_sql
refresh_sql
toarray_sql
tsordstotime_sql
tsordstotimestamp_sql
tsordstodatetime_sql
tsordstodate_sql
unixdate_sql
lastday_sql
dateadd_sql
arrayany_sql
partitionrange_sql
truncatetable_sql
convert_sql
copyparameter_sql
credentials_sql
copy_sql
semicolon_sql
datadeletionproperty_sql
maskingpolicycolumnconstraint_sql
gapfill_sql
scope_resolution
scoperesolution_sql
parsejson_sql
rand_sql
changes_sql
pad_sql
summarize_sql
explodinggenerateseries_sql
converttimezone_sql
json_sql
jsonvalue_sql
skipjsoncolumn_sql
conditionalinsert_sql
multitableinserts_sql
oncondition_sql
jsonextractquote_sql
jsonexists_sql
slice_sql
apply_sql
grant_sql
revoke_sql
grantprivilege_sql
grantprincipal_sql
columns_sql
overlay_sql
todouble_sql
string_sql
median_sql
overflowtruncatebehavior_sql
unixseconds_sql
arraysize_sql
attach_sql
detach_sql
attachoption_sql
watermarkcolumnconstraint_sql
encodeproperty_sql
includeproperty_sql
xmlelement_sql
xmlkeyvalueoption_sql
partitionbyrangeproperty_sql
partitionbyrangepropertydynamic_sql
unpivotcolumns_sql
analyzesample_sql
analyzestatistics_sql
analyzehistogram_sql
analyzedelete_sql
analyzelistchainedrows_sql
analyzevalidate_sql
analyze_sql
xmltable_sql
xmlnamespace_sql
export_sql
declare_sql
declareitem_sql
recursivewithsearch_sql
parameterizedagg_sql
anonymousaggfunc_sql
combinedaggfunc_sql
combinedparameterizedagg_sql
install_sql
get_put_sql
translatecharacters_sql
decodecase_sql
semanticview_sql
getextract_sql
datefromunixdate_sql
space_sql
buildproperty_sql
refreshtriggerproperty_sql
directorystage_sql
uuid_sql
initcap_sql
localtime_sql
localtimestamp_sql
weekstart_sql
chr_sql
block_sql
storedprocedure_sql
ifblock_sql
whileblock_sql
execute_sql
executesql_sql
altermodifysqlsecurity_sql
usingproperty_sql
renameindex_sql