
sqlglot.dialects.fabric

from __future__ import annotations


from sqlglot import exp
from sqlglot.dialects.dialect import NormalizationStrategy
from sqlglot.dialects.tsql import TSQL
from sqlglot.tokens import TokenType


def _cap_data_type_precision(expression: exp.DataType, max_precision: int = 6) -> exp.DataType:
    """
    Cap the precision to a maximum of `max_precision` digits.
    If no precision is specified, default to `max_precision`.
    """

    precision_param = expression.find(exp.DataTypeParam)

    if precision_param and precision_param.this.is_int:
        current_precision = precision_param.this.to_py()
        target_precision = min(current_precision, max_precision)
    else:
        target_precision = max_precision

    return exp.DataType(
        this=expression.this,
        expressions=[exp.DataTypeParam(this=exp.Literal.number(target_precision))],
    )


class Fabric(TSQL):
    """
    Microsoft Fabric Data Warehouse dialect that inherits from T-SQL.

    Microsoft Fabric is a cloud-based analytics platform that provides a unified
    data warehouse experience. While it shares much of T-SQL's syntax, it has
    specific differences and limitations that this dialect addresses.

    Key differences from T-SQL:
    - Case-sensitive identifiers (unlike T-SQL which is case-insensitive)
    - Limited data type support with mappings to supported alternatives
    - Temporal types (DATETIME2, DATETIMEOFFSET, TIME) limited to 6 digits precision
    - Certain legacy types (MONEY, SMALLMONEY, etc.) are not supported
    - Unicode types (NCHAR, NVARCHAR) are mapped to non-unicode equivalents

    References:
    - Data Types: https://learn.microsoft.com/en-us/fabric/data-warehouse/data-types
    - T-SQL Surface Area: https://learn.microsoft.com/en-us/fabric/data-warehouse/tsql-surface-area
    """

    # Fabric is case-sensitive unlike T-SQL which is case-insensitive
    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_SENSITIVE

    class Tokenizer(TSQL.Tokenizer):
        # Override T-SQL tokenizer to handle TIMESTAMP differently
        # In T-SQL, TIMESTAMP is a synonym for ROWVERSION, but in Fabric we want it to be a datetime type
        # Also add UTINYINT keyword mapping since T-SQL doesn't have it
        KEYWORDS = {
            **TSQL.Tokenizer.KEYWORDS,
            "TIMESTAMP": TokenType.TIMESTAMP,
            "UTINYINT": TokenType.UTINYINT,
        }

    class Generator(TSQL.Generator):
        # Fabric-specific type mappings - override T-SQL types that aren't supported
        # Reference: https://learn.microsoft.com/en-us/fabric/data-warehouse/data-types
        TYPE_MAPPING = {
            **TSQL.Generator.TYPE_MAPPING,
            exp.DataType.Type.DATETIME: "DATETIME2",
            exp.DataType.Type.DECIMAL: "DECIMAL",
            exp.DataType.Type.IMAGE: "VARBINARY",
            exp.DataType.Type.INT: "INT",
            exp.DataType.Type.JSON: "VARCHAR",
            exp.DataType.Type.MONEY: "DECIMAL",
            exp.DataType.Type.NCHAR: "CHAR",
            exp.DataType.Type.NVARCHAR: "VARCHAR",
            exp.DataType.Type.ROWVERSION: "ROWVERSION",
            exp.DataType.Type.SMALLDATETIME: "DATETIME2",
            exp.DataType.Type.SMALLMONEY: "DECIMAL",
            exp.DataType.Type.TIMESTAMP: "DATETIME2",
            exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
            exp.DataType.Type.TIMESTAMPTZ: "DATETIME2",
            exp.DataType.Type.TINYINT: "SMALLINT",
            exp.DataType.Type.UTINYINT: "SMALLINT",
            exp.DataType.Type.UUID: "VARBINARY(MAX)",
            exp.DataType.Type.XML: "VARCHAR",
        }

        def datatype_sql(self, expression: exp.DataType) -> str:
            # Check if this is a temporal type that needs precision handling. Fabric limits temporal
            # types to max 6 digits precision. When no precision is specified, we default to 6 digits.
            if (
                expression.is_type(*exp.DataType.TEMPORAL_TYPES)
                and expression.this != exp.DataType.Type.DATE
            ):
                # Create a new expression with the capped precision
                expression = _cap_data_type_precision(expression)

            return super().datatype_sql(expression)

        def cast_sql(self, expression: exp.Cast, safe_prefix: str | None = None) -> str:
            # Cast to DATETIMEOFFSET if inside an AT TIME ZONE expression
            # https://learn.microsoft.com/en-us/sql/t-sql/data-types/datetimeoffset-transact-sql#microsoft-fabric-support
            if expression.is_type(exp.DataType.Type.TIMESTAMPTZ):
                at_time_zone = expression.find_ancestor(exp.AtTimeZone, exp.Select)

                # Return normal cast, if the expression is not in an AT TIME ZONE context
                if not isinstance(at_time_zone, exp.AtTimeZone):
                    return super().cast_sql(expression, safe_prefix)

                # Get the precision from the original TIMESTAMPTZ cast and cap it to 6
                capped_data_type = _cap_data_type_precision(expression.to, max_precision=6)
                precision = capped_data_type.find(exp.DataTypeParam)
                precision_value = (
                    precision.this.to_py() if precision and precision.this.is_int else 6
                )

                # Do the cast explicitly to bypass sqlglot's default handling
                datetimeoffset = f"CAST({expression.this} AS DATETIMEOFFSET({precision_value}))"

                return self.sql(datetimeoffset)

            return super().cast_sql(expression, safe_prefix)

        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
            # Wrap the AT TIME ZONE expression in a cast to DATETIME2 if it contains a TIMESTAMPTZ
            # https://learn.microsoft.com/en-us/sql/t-sql/data-types/datetimeoffset-transact-sql#microsoft-fabric-support
            timestamptz_cast = expression.find(exp.Cast)
            if timestamptz_cast and timestamptz_cast.to.is_type(exp.DataType.Type.TIMESTAMPTZ):
                # Get the precision from the original TIMESTAMPTZ cast and cap it to 6
                data_type = timestamptz_cast.to
                capped_data_type = _cap_data_type_precision(data_type, max_precision=6)
                precision_param = capped_data_type.find(exp.DataTypeParam)
                precision = precision_param.this.to_py() if precision_param else 6

                # Generate the AT TIME ZONE expression (which will handle the inner cast conversion)
                at_time_zone_sql = super().attimezone_sql(expression)

                # Wrap it in an outer cast to DATETIME2
                return f"CAST({at_time_zone_sql} AS DATETIME2({precision}))"

            return super().attimezone_sql(expression)

        def unixtotime_sql(self, expression: exp.UnixToTime) -> str:
            scale = expression.args.get("scale")
            timestamp = expression.this

            if scale not in (None, exp.UnixToTime.SECONDS):
                self.unsupported(f"UnixToTime scale {scale} is not supported by Fabric")
                return ""

            # Convert unix timestamp (seconds) to microseconds and round to avoid decimals
            microseconds = timestamp * exp.Literal.number("1e6")
            rounded = exp.func("round", microseconds, 0)
            rounded_ms_as_bigint = exp.cast(rounded, exp.DataType.Type.BIGINT)

            # Create the base datetime as '1970-01-01' cast to DATETIME2(6)
            epoch_start = exp.cast("'1970-01-01'", "datetime2(6)", dialect="fabric")

            dateadd = exp.DateAdd(
                this=epoch_start,
                expression=rounded_ms_as_bigint,
                unit=exp.Literal.string("MICROSECONDS"),
            )
            return self.sql(dateadd)
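
A quick usage sketch (illustrative, not from the upstream docs): it assumes sqlglot is installed and that importing this module registers the dialect under the name "fabric". The outputs in the comments are what the mappings above suggest, approximately.

    import sqlglot

    # Temporal precision above 6 digits should be capped, and an omitted precision
    # should default to 6 (see datatype_sql and _cap_data_type_precision above).
    print(sqlglot.transpile("SELECT CAST(x AS DATETIME2(9))", read="tsql", write="fabric")[0])
    # e.g. SELECT CAST(x AS DATETIME2(6))

    print(sqlglot.transpile("SELECT CAST(x AS DATETIMEOFFSET)", read="tsql", write="fabric")[0])
    # e.g. SELECT CAST(x AS DATETIME2(6))
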
class Fabric(sqlglot.dialects.tsql.TSQL):

Microsoft Fabric Data Warehouse dialect that inherits from T-SQL.

Microsoft Fabric is a cloud-based analytics platform that provides a unified data warehouse experience. While it shares much of T-SQL's syntax, it has specific differences and limitations that this dialect addresses.

Key differences from T-SQL:

  • Case-sensitive identifiers (unlike T-SQL which is case-insensitive)
  • Limited data type support with mappings to supported alternatives
  • Temporal types (DATETIME2, DATETIMEOFFSET, TIME) limited to 6 digits precision
  • Certain legacy types (MONEY, SMALLMONEY, etc.) are not supported
  • Unicode types (NCHAR, NVARCHAR) are mapped to non-unicode equivalents
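
As a sketch of the type remapping (assuming the dialect is registered as "fabric"; the output comment is approximate):

    import sqlglot

    ddl = "CREATE TABLE t (a NVARCHAR(40), b MONEY, c UNIQUEIDENTIFIER, d TINYINT)"
    print(sqlglot.transpile(ddl, read="tsql", write="fabric")[0])
    # e.g. CREATE TABLE t (a VARCHAR(40), b DECIMAL, c VARBINARY(MAX), d SMALLINT)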

References:

  • Data Types: https://learn.microsoft.com/en-us/fabric/data-warehouse/data-types
  • T-SQL Surface Area: https://learn.microsoft.com/en-us/fabric/data-warehouse/tsql-surface-area

NORMALIZATION_STRATEGY = <NormalizationStrategy.CASE_SENSITIVE: 'CASE_SENSITIVE'>

Specifies the strategy according to which identifiers should be normalized.
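
A sketch of what this strategy implies for identifier normalization (hedged; normalize_identifiers is sqlglot's standard helper, and the exact casing behavior depends on each dialect's defaults):

    from sqlglot import parse_one
    from sqlglot.optimizer.normalize_identifiers import normalize_identifiers

    # With case-insensitive T-SQL, unquoted identifiers are expected to be normalized
    # (lowercased by default); with Fabric's CASE_SENSITIVE strategy they should be kept as-is.
    print(normalize_identifiers(parse_one("SELECT MyCol FROM MyTable"), dialect="tsql").sql())
    # e.g. SELECT mycol FROM mytable
    print(normalize_identifiers(parse_one("SELECT MyCol FROM MyTable"), dialect="fabric").sql())
    # e.g. SELECT MyCol FROM MyTable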

SUPPORTS_COLUMN_JOIN_MARKS = False

Whether the old-style outer join (+) syntax is supported.

tokenizer_class = <class 'Fabric.Tokenizer'>
jsonpath_tokenizer_class = <class 'sqlglot.tokens.JSONPathTokenizer'>
parser_class = <class 'sqlglot.parser.Parser'>
generator_class = <class 'Fabric.Generator'>
INVERSE_FORMAT_MAPPING: Dict[str, str] = {}
INVERSE_FORMAT_TRIE: Dict = {}
INVERSE_CREATABLE_KIND_MAPPING: dict[str, str] = {}
ESCAPED_SEQUENCES: Dict[str, str] = {}
QUOTE_START = "'"
QUOTE_END = "'"
IDENTIFIER_START = '['
IDENTIFIER_END = ']'
BIT_START: Optional[str] = None
BIT_END: Optional[str] = None
HEX_START: Optional[str] = '0x'
HEX_END: Optional[str] = ''
BYTE_START: Optional[str] = None
BYTE_END: Optional[str] = None
UNICODE_START: Optional[str] = None
UNICODE_END: Optional[str] = None
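
These quoting settings are inherited from T-SQL; a small sketch (output approximate) of forcing identifier quoting:

    from sqlglot import parse_one

    # IDENTIFIER_START/IDENTIFIER_END above mean identifiers are delimited with square brackets.
    print(parse_one("SELECT a FROM t").sql(dialect="fabric", identify=True))
    # e.g. SELECT [a] FROM [t]
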
class Fabric.Tokenizer(sqlglot.dialects.tsql.TSQL.Tokenizer):
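
This tokenizer overrides the T-SQL KEYWORDS so that TIMESTAMP maps to a datetime type and UTINYINT is recognized (see the source above). A sketch of the effect (assuming the dialect is registered as "fabric"; output approximate):

    import sqlglot

    # Read with the Fabric tokenizer, TIMESTAMP is a datetime type, so it lands on DATETIME2(6);
    # read as T-SQL, the same keyword would be treated as ROWVERSION instead.
    print(sqlglot.transpile("CAST(x AS TIMESTAMP)", read="fabric", write="fabric")[0])
    # e.g. CAST(x AS DATETIME2(6))
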
class Fabric.Generator(sqlglot.dialects.tsql.TSQL.Generator):

Generator converts a given syntax tree to the corresponding SQL string.

Arguments:
  • pretty: Whether to format the produced SQL string. Default: False.
  • identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
  • normalize: Whether to normalize identifiers to lowercase. Default: False.
  • pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
  • indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
  • normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
  • unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
  • max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
  • leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
  • max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
  • comments: Whether to preserve comments in the output SQL code. Default: True
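
Generator options such as pretty or identify can also be passed through the top-level API; a brief sketch:

    import sqlglot

    print(sqlglot.transpile("SELECT a, b FROM t WHERE a > 1", read="tsql", write="fabric", pretty=True)[0])
    # e.g.
    # SELECT
    #   a,
    #   b
    # FROM t
    # WHERE
    #   a > 1
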
def datatype_sql(self, expression: sqlglot.expressions.DataType) -> str:
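
datatype_sql caps temporal precision at 6 digits and defaults a missing precision to 6, leaving DATE alone (see the source above). A sketch of the effect (outputs approximate):

    from sqlglot import exp

    print(exp.DataType.build("TIME(9)", dialect="fabric").sql(dialect="fabric"))    # e.g. TIME(6)
    print(exp.DataType.build("DATETIME2", dialect="fabric").sql(dialect="fabric"))  # e.g. DATETIME2(6)
    print(exp.DataType.build("DATE").sql(dialect="fabric"))                         # e.g. DATE (left alone)
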
def cast_sql(self, expression: sqlglot.expressions.Cast, safe_prefix: str | None = None) -> str:
def attimezone_sql(self, expression: sqlglot.expressions.AtTimeZone) -> str:
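
Together, cast_sql and attimezone_sql implement the DATETIMEOFFSET handling described in the source comments above: inside AT TIME ZONE the inner cast keeps a (capped) DATETIMEOFFSET type, and the whole expression is wrapped in a cast back to DATETIME2. A hedged sketch (output approximate):

    import sqlglot

    sql = "SELECT CAST(x AS DATETIMEOFFSET(7)) AT TIME ZONE 'UTC' FROM t"
    print(sqlglot.transpile(sql, read="tsql", write="fabric")[0])
    # e.g. SELECT CAST(CAST(x AS DATETIMEOFFSET(6)) AT TIME ZONE 'UTC' AS DATETIME2(6)) FROM t
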
def unixtotime_sql(self, expression: sqlglot.expressions.UnixToTime) -> str:
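
unixtotime_sql converts a unix-seconds value by adding microseconds to the 1970-01-01 epoch (see the source above). A sketch built directly from the expression node named there (output approximate and dependent on how DATEADD/ROUND are rendered):

    from sqlglot import exp

    print(exp.UnixToTime(this=exp.column("ts")).sql(dialect="fabric"))
    # e.g. DATEADD(MICROSECONDS, CAST(ROUND(ts * 1e6, 0) AS BIGINT), CAST('1970-01-01' AS DATETIME2(6)))
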
SELECT_KINDS: Tuple[str, ...] = ()
TRY_SUPPORTED = False
SUPPORTS_UESCAPE = False
SUPPORTS_DECODE_CASE = False
AFTER_HAVING_MODIFIER_TRANSFORMS = {'windows': <function Generator.<lambda>>, 'qualify': <function Generator.<lambda>>}