
sqlglot.dialects.clickhouse

from __future__ import annotations

import typing as t

from sqlglot import exp, generator, parser, tokens
from sqlglot.dialects.dialect import Dialect, inline_array_sql, var_map_sql
from sqlglot.errors import ParseError
from sqlglot.helper import ensure_list, seq_get
from sqlglot.parser import parse_var_map
from sqlglot.tokens import TokenType


def _lower_func(sql: str) -> str:
    index = sql.index("(")
    return sql[:index].lower() + sql[index:]


class ClickHouse(Dialect):
    normalize_functions = None
    null_ordering = "nulls_are_last"

    class Tokenizer(tokens.Tokenizer):
        COMMENTS = ["--", "#", "#!", ("/*", "*/")]
        IDENTIFIERS = ['"', "`"]

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "ASOF": TokenType.ASOF,
            "GLOBAL": TokenType.GLOBAL,
            "DATETIME64": TokenType.DATETIME,
            "FINAL": TokenType.FINAL,
            "FLOAT32": TokenType.FLOAT,
            "FLOAT64": TokenType.DOUBLE,
            "INT16": TokenType.SMALLINT,
            "INT32": TokenType.INT,
            "INT64": TokenType.BIGINT,
            "INT8": TokenType.TINYINT,
            "TUPLE": TokenType.STRUCT,
        }

    class Parser(parser.Parser):
        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "EXPONENTIALTIMEDECAYEDAVG": lambda params, args: exp.ExponentialTimeDecayedAvg(
                this=seq_get(args, 0),
                time=seq_get(args, 1),
                decay=seq_get(params, 0),
            ),
            "MAP": parse_var_map,
            "HISTOGRAM": lambda params, args: exp.Histogram(
                this=seq_get(args, 0), bins=seq_get(params, 0)
            ),
            "GROUPUNIQARRAY": lambda params, args: exp.GroupUniqArray(
                this=seq_get(args, 0), size=seq_get(params, 0)
            ),
            "QUANTILE": lambda params, args: exp.Quantile(this=args, quantile=params),
            "QUANTILES": lambda params, args: exp.Quantiles(parameters=params, expressions=args),
            "QUANTILEIF": lambda params, args: exp.QuantileIf(parameters=params, expressions=args),
        }

        RANGE_PARSERS = {
            **parser.Parser.RANGE_PARSERS,
            TokenType.GLOBAL: lambda self, this: self._match(TokenType.IN)
            and self._parse_in(this, is_global=True),
        }

        JOIN_KINDS = {*parser.Parser.JOIN_KINDS, TokenType.ANY, TokenType.ASOF}  # type: ignore

        TABLE_ALIAS_TOKENS = {*parser.Parser.TABLE_ALIAS_TOKENS} - {TokenType.ANY}  # type: ignore

        LOG_DEFAULTS_TO_LN = True

        def _parse_in(
            self, this: t.Optional[exp.Expression], is_global: bool = False
        ) -> exp.Expression:
            this = super()._parse_in(this)
            this.set("is_global", is_global)
            return this

        def _parse_table(
            self, schema: bool = False, alias_tokens: t.Optional[t.Collection[TokenType]] = None
        ) -> t.Optional[exp.Expression]:
            this = super()._parse_table(schema=schema, alias_tokens=alias_tokens)

            if self._match(TokenType.FINAL):
                this = self.expression(exp.Final, this=this)

            return this

        def _parse_position(self, haystack_first: bool = False) -> exp.Expression:
            return super()._parse_position(haystack_first=True)

        # https://clickhouse.com/docs/en/sql-reference/statements/select/with/
        def _parse_cte(self) -> exp.Expression:
            index = self._index
            try:
                # WITH <identifier> AS <subquery expression>
                return super()._parse_cte()
            except ParseError:
                # WITH <expression> AS <identifier>
                self._retreat(index)
                statement = self._parse_statement()

                if statement and isinstance(statement.this, exp.Alias):
                    self.raise_error("Expected CTE to have alias")

                return self.expression(exp.CTE, this=statement, alias=statement and statement.this)

    class Generator(generator.Generator):
        STRUCT_DELIMITER = ("(", ")")

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,  # type: ignore
            exp.DataType.Type.NULLABLE: "Nullable",
            exp.DataType.Type.DATETIME: "DateTime64",
            exp.DataType.Type.MAP: "Map",
            exp.DataType.Type.ARRAY: "Array",
            exp.DataType.Type.STRUCT: "Tuple",
            exp.DataType.Type.TINYINT: "Int8",
            exp.DataType.Type.SMALLINT: "Int16",
            exp.DataType.Type.INT: "Int32",
            exp.DataType.Type.BIGINT: "Int64",
            exp.DataType.Type.FLOAT: "Float32",
            exp.DataType.Type.DOUBLE: "Float64",
        }

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            exp.Array: inline_array_sql,
            exp.ExponentialTimeDecayedAvg: lambda self, e: f"exponentialTimeDecayedAvg{self._param_args_sql(e, 'decay', ['this', 'time'])}",
            exp.Final: lambda self, e: f"{self.sql(e, 'this')} FINAL",
            exp.GroupUniqArray: lambda self, e: f"groupUniqArray{self._param_args_sql(e, 'size', 'this')}",
            exp.Histogram: lambda self, e: f"histogram{self._param_args_sql(e, 'bins', 'this')}",
            exp.Map: lambda self, e: _lower_func(var_map_sql(self, e)),
            exp.Quantile: lambda self, e: f"quantile{self._param_args_sql(e, 'quantile', 'this')}",
            exp.Quantiles: lambda self, e: f"quantiles{self._param_args_sql(e, 'parameters', 'expressions')}",
            exp.QuantileIf: lambda self, e: f"quantileIf{self._param_args_sql(e, 'parameters', 'expressions')}",
            exp.StrPosition: lambda self, e: f"position({self.format_args(e.this, e.args.get('substr'), e.args.get('position'))})",
            exp.VarMap: lambda self, e: _lower_func(var_map_sql(self, e)),
        }

        EXPLICIT_UNION = True

        def _param_args_sql(
            self,
            expression: exp.Expression,
            param_names: str | t.List[str],
            arg_names: str | t.List[str],
        ) -> str:
            params = self.format_args(
                *(
                    arg
                    for name in ensure_list(param_names)
                    for arg in ensure_list(expression.args.get(name))
                )
            )
            args = self.format_args(
                *(
                    arg
                    for name in ensure_list(arg_names)
                    for arg in ensure_list(expression.args.get(name))
                )
            )
            return f"({params})({args})"

        def cte_sql(self, expression: exp.CTE) -> str:
            if isinstance(expression.this, exp.Alias):
                return self.sql(expression, "this")

            return super().cte_sql(expression)
class ClickHouse(sqlglot.dialects.dialect.Dialect):
class ClickHouse.Tokenizer(sqlglot.tokens.Tokenizer):
class ClickHouse.Parser(sqlglot.parser.Parser):

Parser consumes a list of tokens produced by the sqlglot.tokens.Tokenizer and produces a parsed syntax tree.

Arguments:
  • error_level: the desired error level. Default: ErrorLevel.RAISE
  • error_message_context: determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 50.
  • index_offset: Index offset for arrays, e.g. ARRAY[0] vs ARRAY[1] as the head of a list. Default: 0
  • alias_post_tablesample: Whether the table alias comes after the TABLESAMPLE clause. Default: False
  • max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
  • null_ordering: Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
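A hedged sketch of driving this parser through sqlglot's top-level API rather than instantiating it directly (the identifiers below are made up; the resulting AST can differ between sqlglot versions):

import sqlglot

# FINAL and GLOBAL IN are ClickHouse-specific constructs handled by the
# _parse_table and _parse_in overrides above.
ast = sqlglot.parse_one(
    "SELECT a FROM events FINAL WHERE a GLOBAL IN (SELECT a FROM other)",
    read="clickhouse",
)
print(ast.sql(dialect="clickhouse"))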
class ClickHouse.Generator(sqlglot.generator.Generator):

Generator interprets the given syntax tree and produces a SQL string as output.

Arguments:
  • time_mapping (dict): the dictionary of custom time mappings in which the key represents a python time format and the value represents the target time format
  • time_trie (trie): a trie of the time_mapping keys
  • pretty (bool): if set to True the returned string will be formatted. Default: False.
  • quote_start (str): specifies which starting character to use to delimit quotes. Default: '.
  • quote_end (str): specifies which ending character to use to delimit quotes. Default: '.
  • identifier_start (str): specifies which starting character to use to delimit identifiers. Default: ".
  • identifier_end (str): specifies which ending character to use to delimit identifiers. Default: ".
  • identify (bool | str): 'always': always quote identifiers; 'safe': quote identifiers only if they don't contain an uppercase character; True is equivalent to 'always'.
  • normalize (bool): if set to True, all identifiers will be lowercased
  • string_escape (str): specifies a string escape character. Default: '.
  • identifier_escape (str): specifies an identifier escape character. Default: ".
  • pad (int): determines padding in a formatted string. Default: 2.
  • indent (int): determines the size of indentation in a formatted string. Default: 4.
  • unnest_column_only (bool): if set to True, UNNEST table aliases are considered only as column aliases
  • normalize_functions (str): normalize function names to "upper", "lower", or None. Default: "upper"
  • alias_post_tablesample (bool): whether the table alias comes after the TABLESAMPLE clause. Default: False
  • unsupported_level (ErrorLevel): determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
  • null_ordering (str): Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
  • max_unsupported (int): Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
  • leading_comma (bool): whether the comma is leading or trailing in SELECT statements. Default: False
  • max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
  • comments: Whether or not to preserve comments in the output SQL code. Default: True
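A rough illustration of how TYPE_MAPPING shapes the generated SQL, via sqlglot's transpile helper (column and table names are hypothetical, and the exact rendering depends on the sqlglot version):

import sqlglot

# BIGINT and DOUBLE should come out using the ClickHouse names declared in
# TYPE_MAPPING (Int64 and Float64).
print(
    sqlglot.transpile(
        "SELECT CAST(x AS BIGINT), CAST(y AS DOUBLE) FROM t",
        write="clickhouse",
    )[0]
)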
def cte_sql(self, expression: sqlglot.expressions.CTE) -> str:
Inherited Members
sqlglot.generator.Generator
Generator
generate
unsupported
sep
seg
pad_comment
maybe_comment
wrap
no_identify
normalize_func
indent
sql
uncache_sql
cache_sql
characterset_sql
column_sql
columnposition_sql
columndef_sql
columnconstraint_sql
autoincrementcolumnconstraint_sql
compresscolumnconstraint_sql
generatedasidentitycolumnconstraint_sql
notnullcolumnconstraint_sql
primarykeycolumnconstraint_sql
uniquecolumnconstraint_sql
create_sql
describe_sql
prepend_ctes
with_sql
tablealias_sql
bitstring_sql
hexstring_sql
datatype_sql
directory_sql
delete_sql
drop_sql
except_sql
except_op
fetch_sql
filter_sql
hint_sql
index_sql
identifier_sql
national_sql
partition_sql
properties_sql
root_properties
properties
with_properties
locate_properties
property_sql
likeproperty_sql
fallbackproperty_sql
journalproperty_sql
freespaceproperty_sql
afterjournalproperty_sql
checksumproperty_sql
mergeblockratioproperty_sql
datablocksizeproperty_sql
blockcompressionproperty_sql
isolatedloadingproperty_sql
lockingproperty_sql
withdataproperty_sql
insert_sql
intersect_sql
intersect_op
introducer_sql
pseudotype_sql
returning_sql
rowformatdelimitedproperty_sql
table_sql
tablesample_sql
pivot_sql
tuple_sql
update_sql
values_sql
var_sql
into_sql
from_sql
group_sql
having_sql
join_sql
lambda_sql
lateral_sql
limit_sql
offset_sql
setitem_sql
set_sql
pragma_sql
lock_sql
literal_sql
loaddata_sql
null_sql
boolean_sql
order_sql
cluster_sql
distribute_sql
sort_sql
ordered_sql
matchrecognize_sql
query_modifiers
select_sql
schema_sql
star_sql
structkwarg_sql
parameter_sql
sessionparameter_sql
placeholder_sql
subquery_sql
qualify_sql
union_sql
union_op
unnest_sql
where_sql
window_sql
partition_by_sql
window_spec_sql
withingroup_sql
between_sql
bracket_sql
all_sql
any_sql
exists_sql
case_sql
constraint_sql
extract_sql
trim_sql
concat_sql
check_sql
foreignkey_sql
primarykey_sql
unique_sql
if_sql
matchagainst_sql
jsonkeyvalue_sql
jsonobject_sql
in_sql
in_unnest_op
interval_sql
return_sql
reference_sql
anonymous_sql
paren_sql
neg_sql
not_sql
alias_sql
aliases_sql
attimezone_sql
add_sql
and_sql
connector_sql
bitwiseand_sql
bitwiseleftshift_sql
bitwisenot_sql
bitwiseor_sql
bitwiserightshift_sql
bitwisexor_sql
cast_sql
currentdate_sql
collate_sql
command_sql
comment_sql
transaction_sql
commit_sql
rollback_sql
altercolumn_sql
renametable_sql
altertable_sql
droppartition_sql
addconstraint_sql
distinct_sql
ignorenulls_sql
respectnulls_sql
intdiv_sql
dpipe_sql
div_sql
overlaps_sql
distance_sql
dot_sql
eq_sql
escape_sql
glob_sql
gt_sql
gte_sql
ilike_sql
ilikeany_sql
is_sql
like_sql
likeany_sql
similarto_sql
lt_sql
lte_sql
mod_sql
mul_sql
neq_sql
nullsafeeq_sql
nullsafeneq_sql
or_sql
slice_sql
sub_sql
trycast_sql
use_sql
binary
function_fallback_sql
func
format_args
text_width
format_time
expressions
op_expressions
naked_property
set_operation
tag_sql
token_sql
userdefinedfunction_sql
joinhint_sql
kwarg_sql
when_sql
merge_sql
tochar_sql