sqlglot.dialects.athena
from __future__ import annotations

import typing as t

from sqlglot import exp
from sqlglot.dialects.trino import Trino
from sqlglot.dialects.hive import Hive
from sqlglot.tokens import TokenType


def _generate_as_hive(expression: exp.Expression) -> bool:
    if isinstance(expression, exp.Create):
        if expression.kind == "TABLE":
            properties: t.Optional[exp.Properties] = expression.args.get("properties")
            if properties and properties.find(exp.ExternalProperty):
                return True  # CREATE EXTERNAL TABLE is Hive

            if not isinstance(expression.expression, exp.Select):
                return True  # any CREATE TABLE other than CREATE TABLE AS SELECT is Hive
        else:
            return expression.kind != "VIEW"  # CREATE VIEW is never Hive but CREATE SCHEMA etc is

    # https://docs.aws.amazon.com/athena/latest/ug/ddl-reference.html
    elif isinstance(expression, (exp.Alter, exp.Drop, exp.Describe)):
        if isinstance(expression, exp.Drop) and expression.kind == "VIEW":
            # DROP VIEW is Trino (I guess because CREATE VIEW is)
            return False

        # Everything else is Hive
        return True

    return False


def _is_iceberg_table(properties: exp.Properties) -> bool:
    table_type_property = next(
        (
            p
            for p in properties.expressions
            if isinstance(p, exp.Property) and p.name == "table_type"
        ),
        None,
    )
    return bool(table_type_property and table_type_property.text("value").lower() == "iceberg")


def _location_property_sql(self: Athena.Generator, e: exp.LocationProperty):
    # If table_type='iceberg', the LocationProperty is called 'location'
    # Otherwise, it's called 'external_location'
    # ref: https://docs.aws.amazon.com/athena/latest/ug/create-table-as.html

    prop_name = "external_location"

    if isinstance(e.parent, exp.Properties):
        if _is_iceberg_table(e.parent):
            prop_name = "location"

    return f"{prop_name}={self.sql(e, 'this')}"


def _partitioned_by_property_sql(self: Athena.Generator, e: exp.PartitionedByProperty) -> str:
    # If table_type='iceberg' then the table property for partitioning is called 'partitioning'
    # If table_type='hive' it's called 'partitioned_by'
    # ref: https://docs.aws.amazon.com/athena/latest/ug/create-table-as.html#ctas-table-properties

    prop_name = "partitioned_by"
    if isinstance(e.parent, exp.Properties):
        if _is_iceberg_table(e.parent):
            prop_name = "partitioning"

    return f"{prop_name}={self.sql(e, 'this')}"


class Athena(Trino):
    """
    Over the years, it looks like AWS has taken various execution engines, bolted on AWS-specific modifications and then
    built the Athena service around them.

    Thus, Athena is not simply hosted Trino, it's more like a router that routes SQL queries to an execution engine depending
    on the query type.

    As at 2024-09-10, assuming your Athena workgroup is configured to use "Athena engine version 3", the following engines exist:

    Hive:
     - Accepts mostly the same syntax as Hadoop / Hive
     - Uses backticks to quote identifiers
     - Has a distinctive DDL syntax (around things like setting table properties, storage locations etc) that is different from Trino
     - Used for *most* DDL, with some exceptions that get routed to the Trino engine instead:
        - CREATE [EXTERNAL] TABLE (without AS SELECT)
        - ALTER
        - DROP

    Trino:
     - Uses double quotes to quote identifiers
     - Used for DDL operations that involve SELECT queries, eg:
        - CREATE VIEW / DROP VIEW
        - CREATE TABLE... AS SELECT
     - Used for DML operations
        - SELECT, INSERT, UPDATE, DELETE, MERGE

    The SQLGlot Athena dialect tries to identify which engine a query would be routed to and then uses the parser / generator for that engine
    rather than trying to create a universal syntax that can handle both types.
    """

    class Tokenizer(Trino.Tokenizer):
        """
        The Tokenizer is flexible enough to tokenize queries across both the Hive and Trino engines
        """

        IDENTIFIERS = ['"', "`"]
        KEYWORDS = {
            **Hive.Tokenizer.KEYWORDS,
            **Trino.Tokenizer.KEYWORDS,
            "UNLOAD": TokenType.COMMAND,
        }

    class Parser(Trino.Parser):
        """
        Parse queries for the Athena Trino execution engine
        """

        STATEMENT_PARSERS = {
            **Trino.Parser.STATEMENT_PARSERS,
            TokenType.USING: lambda self: self._parse_as_command(self._prev),
        }

    class _HiveGenerator(Hive.Generator):
        def alter_sql(self, expression: exp.Alter) -> str:
            # package any ALTER TABLE ADD actions into a Schema object
            # so it gets generated as `ALTER TABLE .. ADD COLUMNS(...)`
            # instead of `ALTER TABLE ... ADD COLUMN` which is invalid syntax on Athena
            if isinstance(expression, exp.Alter) and expression.kind == "TABLE":
                if expression.actions and isinstance(expression.actions[0], exp.ColumnDef):
                    new_actions = exp.Schema(expressions=expression.actions)
                    expression.set("actions", [new_actions])

            return super().alter_sql(expression)

    class Generator(Trino.Generator):
        """
        Generate queries for the Athena Trino execution engine
        """

        PROPERTIES_LOCATION = {
            **Trino.Generator.PROPERTIES_LOCATION,
            exp.LocationProperty: exp.Properties.Location.POST_WITH,
        }

        TRANSFORMS = {
            **Trino.Generator.TRANSFORMS,
            exp.FileFormatProperty: lambda self, e: f"format={self.sql(e, 'this')}",
            exp.PartitionedByProperty: _partitioned_by_property_sql,
            exp.LocationProperty: _location_property_sql,
        }

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)

            hive_kwargs = {**kwargs, "dialect": "hive"}

            self._hive_generator = Athena._HiveGenerator(*args, **hive_kwargs)

        def generate(self, expression: exp.Expression, copy: bool = True) -> str:
            if _generate_as_hive(expression):
                return self._hive_generator.generate(expression, copy)

            return super().generate(expression, copy)
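As a quick illustration of the routing helper above, _generate_as_hive can be called directly on parsed expressions to see which engine's generator would be used. This is a minimal sketch, not part of the module itself; it assumes importing the private helper is acceptable, and the expected values follow from the branches shown in the source:

from sqlglot import parse_one
from sqlglot.dialects.athena import _generate_as_hive

# Per the branches above: plain DDL goes to Hive, but DROP VIEW and DML go to Trino.
print(_generate_as_hive(parse_one("DROP TABLE t", dialect="athena")))  # True -> Hive generator
print(_generate_as_hive(parse_one("DROP VIEW v", dialect="athena")))   # False -> Trino generator
print(_generate_as_hive(parse_one("SELECT 1", dialect="athena")))      # False -> Trino generator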
75class Athena(Trino): 76 """ 77 Over the years, it looks like AWS has taken various execution engines, bolted on AWS-specific modifications and then 78 built the Athena service around them. 79 80 Thus, Athena is not simply hosted Trino, it's more like a router that routes SQL queries to an execution engine depending 81 on the query type. 82 83 As at 2024-09-10, assuming your Athena workgroup is configured to use "Athena engine version 3", the following engines exist: 84 85 Hive: 86 - Accepts mostly the same syntax as Hadoop / Hive 87 - Uses backticks to quote identifiers 88 - Has a distinctive DDL syntax (around things like setting table properties, storage locations etc) that is different from Trino 89 - Used for *most* DDL, with some exceptions that get routed to the Trino engine instead: 90 - CREATE [EXTERNAL] TABLE (without AS SELECT) 91 - ALTER 92 - DROP 93 94 Trino: 95 - Uses double quotes to quote identifiers 96 - Used for DDL operations that involve SELECT queries, eg: 97 - CREATE VIEW / DROP VIEW 98 - CREATE TABLE... AS SELECT 99 - Used for DML operations 100 - SELECT, INSERT, UPDATE, DELETE, MERGE 101 102 The SQLGlot Athena dialect tries to identify which engine a query would be routed to and then uses the parser / generator for that engine 103 rather than trying to create a universal syntax that can handle both types. 104 """ 105 106 class Tokenizer(Trino.Tokenizer): 107 """ 108 The Tokenizer is flexible enough to tokenize queries across both the Hive and Trino engines 109 """ 110 111 IDENTIFIERS = ['"', "`"] 112 KEYWORDS = { 113 **Hive.Tokenizer.KEYWORDS, 114 **Trino.Tokenizer.KEYWORDS, 115 "UNLOAD": TokenType.COMMAND, 116 } 117 118 class Parser(Trino.Parser): 119 """ 120 Parse queries for the Athena Trino execution engine 121 """ 122 123 STATEMENT_PARSERS = { 124 **Trino.Parser.STATEMENT_PARSERS, 125 TokenType.USING: lambda self: self._parse_as_command(self._prev), 126 } 127 128 class _HiveGenerator(Hive.Generator): 129 def alter_sql(self, expression: exp.Alter) -> str: 130 # package any ALTER TABLE ADD actions into a Schema object 131 # so it gets generated as `ALTER TABLE .. ADD COLUMNS(...)` 132 # instead of `ALTER TABLE ... ADD COLUMN` which is invalid syntax on Athena 133 if isinstance(expression, exp.Alter) and expression.kind == "TABLE": 134 if expression.actions and isinstance(expression.actions[0], exp.ColumnDef): 135 new_actions = exp.Schema(expressions=expression.actions) 136 expression.set("actions", [new_actions]) 137 138 return super().alter_sql(expression) 139 140 class Generator(Trino.Generator): 141 """ 142 Generate queries for the Athena Trino execution engine 143 """ 144 145 PROPERTIES_LOCATION = { 146 **Trino.Generator.PROPERTIES_LOCATION, 147 exp.LocationProperty: exp.Properties.Location.POST_WITH, 148 } 149 150 TRANSFORMS = { 151 **Trino.Generator.TRANSFORMS, 152 exp.FileFormatProperty: lambda self, e: f"format={self.sql(e, 'this')}", 153 exp.PartitionedByProperty: _partitioned_by_property_sql, 154 exp.LocationProperty: _location_property_sql, 155 } 156 157 def __init__(self, *args, **kwargs): 158 super().__init__(*args, **kwargs) 159 160 hive_kwargs = {**kwargs, "dialect": "hive"} 161 162 self._hive_generator = Athena._HiveGenerator(*args, **hive_kwargs) 163 164 def generate(self, expression: exp.Expression, copy: bool = True) -> str: 165 if _generate_as_hive(expression): 166 return self._hive_generator.generate(expression, copy) 167 168 return super().generate(expression, copy)
Over the years, it looks like AWS has taken various execution engines, bolted on AWS-specific modifications and then built the Athena service around them.
Thus, Athena is not simply hosted Trino; it's more like a router that routes SQL queries to an execution engine depending on the query type.
As at 2024-09-10, assuming your Athena workgroup is configured to use "Athena engine version 3", the following engines exist:
Hive:
- Accepts mostly the same syntax as Hadoop / Hive
- Uses backticks to quote identifiers
- Has a distinctive DDL syntax (around things like setting table properties, storage locations, etc.) that is different from Trino
- Used for most DDL; the exceptions (listed under Trino below) are routed to the Trino engine instead. Hive handles, for example:
  - CREATE [EXTERNAL] TABLE (without AS SELECT)
  - ALTER
  - DROP (except DROP VIEW)
Trino:
- Uses double quotes to quote identifiers
- Used for DDL operations that involve SELECT queries, e.g.:
  - CREATE VIEW / DROP VIEW
  - CREATE TABLE ... AS SELECT
- Used for DML operations:
  - SELECT, INSERT, UPDATE, DELETE, MERGE
The SQLGlot Athena dialect tries to identify which engine a query would be routed to and then uses the parser / generator for that engine rather than trying to create a universal syntax that can handle both types.
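A rough sketch of that routing in practice (the SQL shown in the comments is indicative only and may differ slightly between sqlglot versions; identify=True simply forces identifier quoting so the difference is visible):

import sqlglot

# DML is generated via the Trino path, so forced identifier quoting uses double quotes.
print(sqlglot.transpile("SELECT col FROM tbl", read="athena", write="athena", identify=True)[0])
# e.g. SELECT "col" FROM "tbl"

# Most DDL is generated via the Hive path, so the same option produces backticks.
print(sqlglot.transpile("DROP TABLE tbl", read="athena", write="athena", identify=True)[0])
# e.g. DROP TABLE `tbl`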
The Tokenizer is flexible enough to tokenize queries across both the Hive and Trino engines
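For example (a minimal sketch; both statements are assumed to be valid Athena input), the same query can be written with either quoting style and still tokenize and parse under the athena dialect:

import sqlglot

for sql in ('SELECT "col" FROM tbl', "SELECT `col` FROM tbl"):
    # Both Trino-style double quotes and Hive-style backticks are accepted as identifier quotes.
    print(sqlglot.parse_one(sql, dialect="athena").sql(dialect="athena"))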
The Parser parses queries for the Athena Trino execution engine.
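Statements the dialect doesn't model in detail, such as UNLOAD (tokenized as a COMMAND keyword) and statements starting with USING, are kept as opaque exp.Command nodes rather than fully parsed trees. A hedged sketch, where the bucket path and query are placeholders:

import sqlglot
from sqlglot import exp

stmt = sqlglot.parse_one(
    "UNLOAD (SELECT * FROM tbl) TO 's3://bucket/prefix' WITH (format = 'PARQUET')",
    dialect="athena",
)
# Expected to come back as a generic Command node, since UNLOAD maps to TokenType.COMMAND.
print(isinstance(stmt, exp.Command))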
The Generator produces queries for the Athena Trino execution engine, delegating to an internal Hive generator for the statements that Athena routes to its Hive engine.
Generator.generate() generates the SQL string corresponding to the given syntax tree.

Arguments:
- expression: The syntax tree.
- copy: Whether to copy the expression. The generator performs mutations, so it is safer to copy.

Returns:
The SQL string corresponding to expression.
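Because generate() routes ALTER statements to the internal Hive generator, column additions come back in the ADD COLUMNS (...) form that Athena expects. A small sketch; the exact formatting of the output may vary:

import sqlglot

# ALTER is routed to the Hive generator, which packages ADD COLUMN actions into a single
# ADD COLUMNS (...) clause, since a bare ADD COLUMN is invalid syntax on Athena.
print(sqlglot.transpile("ALTER TABLE tbl ADD COLUMN new_col INT", read="athena", write="athena")[0])
# e.g. ALTER TABLE tbl ADD COLUMNS (new_col INT)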