Edit on GitHub

sqlglot.dialects.mysql — MySQL dialect module (tokenizer, parser, and SQL generator).

  1from __future__ import annotations
  2
  3from sqlglot import exp, generator, parser, tokens
  4from sqlglot.dialects.dialect import (
  5    Dialect,
  6    arrow_json_extract_scalar_sql,
  7    locate_to_strposition,
  8    max_or_greatest,
  9    min_or_least,
 10    no_ilike_sql,
 11    no_paren_current_date_sql,
 12    no_tablesample_sql,
 13    no_trycast_sql,
 14    rename_func,
 15    strposition_to_locate_sql,
 16)
 17from sqlglot.helper import seq_get
 18from sqlglot.tokens import TokenType
 19
 20
 21def _show_parser(*args, **kwargs):
 22    def _parse(self):
 23        return self._parse_show_mysql(*args, **kwargs)
 24
 25    return _parse
 26
 27
 28def _date_trunc_sql(self, expression):
 29    expr = self.sql(expression, "this")
 30    unit = expression.text("unit")
 31
 32    if unit == "day":
 33        return f"DATE({expr})"
 34
 35    if unit == "week":
 36        concat = f"CONCAT(YEAR({expr}), ' ', WEEK({expr}, 1), ' 1')"
 37        date_format = "%Y %u %w"
 38    elif unit == "month":
 39        concat = f"CONCAT(YEAR({expr}), ' ', MONTH({expr}), ' 1')"
 40        date_format = "%Y %c %e"
 41    elif unit == "quarter":
 42        concat = f"CONCAT(YEAR({expr}), ' ', QUARTER({expr}) * 3 - 2, ' 1')"
 43        date_format = "%Y %c %e"
 44    elif unit == "year":
 45        concat = f"CONCAT(YEAR({expr}), ' 1 1')"
 46        date_format = "%Y %c %e"
 47    else:
 48        self.unsupported(f"Unexpected interval unit: {unit}")
 49        return f"DATE({expr})"
 50
 51    return f"STR_TO_DATE({concat}, '{date_format}')"
 52
 53
 54def _str_to_date(args):
 55    date_format = MySQL.format_time(seq_get(args, 1))
 56    return exp.StrToDate(this=seq_get(args, 0), format=date_format)
 57
 58
 59def _str_to_date_sql(self, expression):
 60    date_format = self.format_time(expression)
 61    return f"STR_TO_DATE({self.sql(expression.this)}, {date_format})"
 62
 63
 64def _trim_sql(self, expression):
 65    target = self.sql(expression, "this")
 66    trim_type = self.sql(expression, "position")
 67    remove_chars = self.sql(expression, "expression")
 68
 69    # Use TRIM/LTRIM/RTRIM syntax if the expression isn't mysql-specific
 70    if not remove_chars:
 71        return self.trim_sql(expression)
 72
 73    trim_type = f"{trim_type} " if trim_type else ""
 74    remove_chars = f"{remove_chars} " if remove_chars else ""
 75    from_part = "FROM " if trim_type or remove_chars else ""
 76    return f"TRIM({trim_type}{remove_chars}{from_part}{target})"
 77
 78
 79def _date_add(expression_class):
 80    def func(args):
 81        interval = seq_get(args, 1)
 82        return expression_class(
 83            this=seq_get(args, 0),
 84            expression=interval.this,
 85            unit=exp.Literal.string(interval.text("unit").lower()),
 86        )
 87
 88    return func
 89
 90
 91def _date_add_sql(kind):
 92    def func(self, expression):
 93        this = self.sql(expression, "this")
 94        unit = expression.text("unit").upper() or "DAY"
 95        return (
 96            f"DATE_{kind}({this}, {self.sql(exp.Interval(this=expression.expression, unit=unit))})"
 97        )
 98
 99    return func
100
101
class MySQL(Dialect):
    """MySQL dialect: tokenizer, parser, and generator overrides for MySQL syntax."""

    time_format = "'%Y-%m-%d %T'"

    # https://prestodb.io/docs/current/functions/datetime.html#mysql-date-functions
    # Maps MySQL time-format specifiers to the dialect-neutral (strftime-like) ones.
    time_mapping = {
        "%M": "%B",
        "%c": "%-m",
        "%e": "%-d",
        "%h": "%I",
        "%i": "%M",
        "%s": "%S",
        "%S": "%S",
        "%u": "%W",
        "%k": "%-H",
        "%l": "%-I",
        "%T": "%H:%M:%S",
    }

    class Tokenizer(tokens.Tokenizer):
        # MySQL allows both single and double quotes for string literals and
        # backticks for identifiers; '#' starts a line comment.
        QUOTES = ["'", '"']
        COMMENTS = ["--", "#", ("/*", "*/")]
        IDENTIFIERS = ["`"]
        STRING_ESCAPES = ["'", "\\"]
        BIT_STRINGS = [("b'", "'"), ("B'", "'"), ("0b", "")]
        HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", "")]

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "MEDIUMTEXT": TokenType.MEDIUMTEXT,
            "LONGTEXT": TokenType.LONGTEXT,
            "MEDIUMBLOB": TokenType.MEDIUMBLOB,
            "LONGBLOB": TokenType.LONGBLOB,
            # START (TRANSACTION) is tokenized like BEGIN.
            "START": TokenType.BEGIN,
            "SEPARATOR": TokenType.SEPARATOR,
            # Character-set introducers (e.g. _utf8'abc') — one entry per charset.
            "_ARMSCII8": TokenType.INTRODUCER,
            "_ASCII": TokenType.INTRODUCER,
            "_BIG5": TokenType.INTRODUCER,
            "_BINARY": TokenType.INTRODUCER,
            "_CP1250": TokenType.INTRODUCER,
            "_CP1251": TokenType.INTRODUCER,
            "_CP1256": TokenType.INTRODUCER,
            "_CP1257": TokenType.INTRODUCER,
            "_CP850": TokenType.INTRODUCER,
            "_CP852": TokenType.INTRODUCER,
            "_CP866": TokenType.INTRODUCER,
            "_CP932": TokenType.INTRODUCER,
            "_DEC8": TokenType.INTRODUCER,
            "_EUCJPMS": TokenType.INTRODUCER,
            "_EUCKR": TokenType.INTRODUCER,
            "_GB18030": TokenType.INTRODUCER,
            "_GB2312": TokenType.INTRODUCER,
            "_GBK": TokenType.INTRODUCER,
            "_GEOSTD8": TokenType.INTRODUCER,
            "_GREEK": TokenType.INTRODUCER,
            "_HEBREW": TokenType.INTRODUCER,
            "_HP8": TokenType.INTRODUCER,
            "_KEYBCS2": TokenType.INTRODUCER,
            "_KOI8R": TokenType.INTRODUCER,
            "_KOI8U": TokenType.INTRODUCER,
            "_LATIN1": TokenType.INTRODUCER,
            "_LATIN2": TokenType.INTRODUCER,
            "_LATIN5": TokenType.INTRODUCER,
            "_LATIN7": TokenType.INTRODUCER,
            "_MACCE": TokenType.INTRODUCER,
            "_MACROMAN": TokenType.INTRODUCER,
            "_SJIS": TokenType.INTRODUCER,
            "_SWE7": TokenType.INTRODUCER,
            "_TIS620": TokenType.INTRODUCER,
            "_UCS2": TokenType.INTRODUCER,
            "_UJIS": TokenType.INTRODUCER,
            # https://dev.mysql.com/doc/refman/8.0/en/string-literals.html
            "_UTF8": TokenType.INTRODUCER,
            "_UTF16": TokenType.INTRODUCER,
            "_UTF16LE": TokenType.INTRODUCER,
            "_UTF32": TokenType.INTRODUCER,
            "_UTF8MB3": TokenType.INTRODUCER,
            "_UTF8MB4": TokenType.INTRODUCER,
            "@@": TokenType.SESSION_PARAMETER,
        }

        # SHOW is parsed as a real statement (see Parser.STATEMENT_PARSERS)
        # instead of being swallowed as an opaque command.
        COMMANDS = tokens.Tokenizer.COMMANDS - {TokenType.SHOW}

    class Parser(parser.Parser):
        # SCHEMA()/DATABASE() are valid function calls in MySQL.
        FUNC_TOKENS = {*parser.Parser.FUNC_TOKENS, TokenType.SCHEMA, TokenType.DATABASE}  # type: ignore

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "DATE_ADD": _date_add(exp.DateAdd),
            "DATE_SUB": _date_add(exp.DateSub),
            "STR_TO_DATE": _str_to_date,
            "LOCATE": locate_to_strposition,
            # INSTR(haystack, needle) — argument order is swapped vs StrPosition.
            "INSTR": lambda args: exp.StrPosition(substr=seq_get(args, 1), this=seq_get(args, 0)),
            # LEFT(s, n) is modeled as SUBSTRING(s, 1, n).
            "LEFT": lambda args: exp.Substring(
                this=seq_get(args, 0), start=exp.Literal.number(1), length=seq_get(args, 1)
            ),
        }

        FUNCTION_PARSERS = {
            **parser.Parser.FUNCTION_PARSERS,  # type: ignore
            # GROUP_CONCAT(expr [SEPARATOR sep]) — the separator is optional.
            "GROUP_CONCAT": lambda self: self.expression(
                exp.GroupConcat,
                this=self._parse_lambda(),
                separator=self._match(TokenType.SEPARATOR) and self._parse_field(),
            ),
        }

        PROPERTY_PARSERS = {
            **parser.Parser.PROPERTY_PARSERS,  # type: ignore
            "ENGINE": lambda self: self._parse_property_assignment(exp.EngineProperty),
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,  # type: ignore
            TokenType.SHOW: lambda self: self._parse_show(),
        }

        # One entry per SHOW variant; aliases (CHARSET, SLAVE HOSTS, ...) map to
        # the same canonical name so generation round-trips consistently.
        SHOW_PARSERS = {
            "BINARY LOGS": _show_parser("BINARY LOGS"),
            "MASTER LOGS": _show_parser("BINARY LOGS"),
            "BINLOG EVENTS": _show_parser("BINLOG EVENTS"),
            "CHARACTER SET": _show_parser("CHARACTER SET"),
            "CHARSET": _show_parser("CHARACTER SET"),
            "COLLATION": _show_parser("COLLATION"),
            "FULL COLUMNS": _show_parser("COLUMNS", target="FROM", full=True),
            "COLUMNS": _show_parser("COLUMNS", target="FROM"),
            "CREATE DATABASE": _show_parser("CREATE DATABASE", target=True),
            "CREATE EVENT": _show_parser("CREATE EVENT", target=True),
            "CREATE FUNCTION": _show_parser("CREATE FUNCTION", target=True),
            "CREATE PROCEDURE": _show_parser("CREATE PROCEDURE", target=True),
            "CREATE TABLE": _show_parser("CREATE TABLE", target=True),
            "CREATE TRIGGER": _show_parser("CREATE TRIGGER", target=True),
            "CREATE VIEW": _show_parser("CREATE VIEW", target=True),
            "DATABASES": _show_parser("DATABASES"),
            "ENGINE": _show_parser("ENGINE", target=True),
            "STORAGE ENGINES": _show_parser("ENGINES"),
            "ENGINES": _show_parser("ENGINES"),
            "ERRORS": _show_parser("ERRORS"),
            "EVENTS": _show_parser("EVENTS"),
            "FUNCTION CODE": _show_parser("FUNCTION CODE", target=True),
            "FUNCTION STATUS": _show_parser("FUNCTION STATUS"),
            "GRANTS": _show_parser("GRANTS", target="FOR"),
            "INDEX": _show_parser("INDEX", target="FROM"),
            "MASTER STATUS": _show_parser("MASTER STATUS"),
            "OPEN TABLES": _show_parser("OPEN TABLES"),
            "PLUGINS": _show_parser("PLUGINS"),
            "PROCEDURE CODE": _show_parser("PROCEDURE CODE", target=True),
            "PROCEDURE STATUS": _show_parser("PROCEDURE STATUS"),
            "PRIVILEGES": _show_parser("PRIVILEGES"),
            "FULL PROCESSLIST": _show_parser("PROCESSLIST", full=True),
            "PROCESSLIST": _show_parser("PROCESSLIST"),
            "PROFILE": _show_parser("PROFILE"),
            "PROFILES": _show_parser("PROFILES"),
            "RELAYLOG EVENTS": _show_parser("RELAYLOG EVENTS"),
            "REPLICAS": _show_parser("REPLICAS"),
            "SLAVE HOSTS": _show_parser("REPLICAS"),
            "REPLICA STATUS": _show_parser("REPLICA STATUS"),
            "SLAVE STATUS": _show_parser("REPLICA STATUS"),
            "GLOBAL STATUS": _show_parser("STATUS", global_=True),
            "SESSION STATUS": _show_parser("STATUS"),
            "STATUS": _show_parser("STATUS"),
            "TABLE STATUS": _show_parser("TABLE STATUS"),
            "FULL TABLES": _show_parser("TABLES", full=True),
            "TABLES": _show_parser("TABLES"),
            "TRIGGERS": _show_parser("TRIGGERS"),
            "GLOBAL VARIABLES": _show_parser("VARIABLES", global_=True),
            "SESSION VARIABLES": _show_parser("VARIABLES"),
            "VARIABLES": _show_parser("VARIABLES"),
            "WARNINGS": _show_parser("WARNINGS"),
        }

        SET_PARSERS = {
            **parser.Parser.SET_PARSERS,
            "PERSIST": lambda self: self._parse_set_item_assignment("PERSIST"),
            "PERSIST_ONLY": lambda self: self._parse_set_item_assignment("PERSIST_ONLY"),
            "CHARACTER SET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
            "CHARSET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
            "NAMES": lambda self: self._parse_set_item_names(),
        }

        # Valid type keywords for SHOW PROFILE.
        PROFILE_TYPES = {
            "ALL",
            "BLOCK IO",
            "CONTEXT SWITCHES",
            "CPU",
            "IPC",
            "MEMORY",
            "PAGE FAULTS",
            "SOURCE",
            "SWAPS",
        }

        LOG_DEFAULTS_TO_LN = True

        def _parse_show_mysql(self, this, target=False, full=None, global_=None):
            """Parse the tail of a MySQL SHOW statement into an ``exp.Show``.

            Args:
                this: canonical SHOW variant name (e.g. "TABLES", "PROFILE").
                target: truthy if the variant takes an object name; when a
                    string, that keyword (e.g. "FROM", "FOR") is consumed first.
                full: True when the FULL modifier was present.
                global_: True when the GLOBAL modifier was present.
            """
            if target:
                if isinstance(target, str):
                    self._match_text_seq(target)
                target_id = self._parse_id_var()
            else:
                target_id = None

            log = self._parse_string() if self._match_text_seq("IN") else None

            if this in {"BINLOG EVENTS", "RELAYLOG EVENTS"}:
                # For these variants FROM introduces a log position, not a db.
                position = self._parse_number() if self._match_text_seq("FROM") else None
                db = None
            else:
                position = None
                db = None

                if self._match(TokenType.FROM):
                    db = self._parse_id_var()
                elif self._match(TokenType.DOT):
                    # "db.target" form: what we parsed first was the db part.
                    db = target_id
                    target_id = self._parse_id_var()

            channel = self._parse_id_var() if self._match_text_seq("FOR", "CHANNEL") else None

            like = self._parse_string() if self._match_text_seq("LIKE") else None
            where = self._parse_where()

            if this == "PROFILE":
                types = self._parse_csv(lambda: self._parse_var_from_options(self.PROFILE_TYPES))
                query = self._parse_number() if self._match_text_seq("FOR", "QUERY") else None
                offset = self._parse_number() if self._match_text_seq("OFFSET") else None
                limit = self._parse_number() if self._match_text_seq("LIMIT") else None
            else:
                types, query = None, None
                offset, limit = self._parse_oldstyle_limit()

            # SHOW ENGINE ... {MUTEX | STATUS}: tri-state (True/False/None).
            mutex = True if self._match_text_seq("MUTEX") else None
            mutex = False if self._match_text_seq("STATUS") else mutex

            return self.expression(
                exp.Show,
                this=this,
                target=target_id,
                full=full,
                log=log,
                position=position,
                db=db,
                channel=channel,
                like=like,
                where=where,
                types=types,
                query=query,
                offset=offset,
                limit=limit,
                mutex=mutex,
                **{"global": global_},
            )

        def _parse_oldstyle_limit(self):
            """Parse old-style ``LIMIT [offset,] count`` and return (offset, limit)."""
            limit = None
            offset = None
            if self._match_text_seq("LIMIT"):
                parts = self._parse_csv(self._parse_number)
                if len(parts) == 1:
                    limit = parts[0]
                elif len(parts) == 2:
                    # LIMIT offset, count
                    limit = parts[1]
                    offset = parts[0]
            return offset, limit

        def _parse_set_item_charset(self, kind):
            """Parse ``SET {CHARACTER SET | CHARSET} value`` into a SetItem."""
            this = self._parse_string() or self._parse_id_var()

            return self.expression(
                exp.SetItem,
                this=this,
                kind=kind,
            )

        def _parse_set_item_names(self):
            """Parse ``SET NAMES charset [COLLATE collation]`` into a SetItem."""
            charset = self._parse_string() or self._parse_id_var()
            if self._match_text_seq("COLLATE"):
                collate = self._parse_string() or self._parse_id_var()
            else:
                collate = None
            return self.expression(
                exp.SetItem,
                this=charset,
                collate=collate,
                kind="NAMES",
            )

    class Generator(generator.Generator):
        LOCKING_READS_SUPPORTED = True
        NULL_ORDERING_SUPPORTED = False

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            exp.CurrentDate: no_paren_current_date_sql,
            exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
            exp.ILike: no_ilike_sql,
            exp.JSONExtractScalar: arrow_json_extract_scalar_sql,
            exp.Max: max_or_greatest,
            exp.Min: min_or_least,
            exp.TableSample: no_tablesample_sql,
            exp.TryCast: no_trycast_sql,
            exp.DateAdd: _date_add_sql("ADD"),
            exp.DateDiff: lambda self, e: f"DATEDIFF({self.format_args(e.this, e.expression)})",
            exp.DateSub: _date_add_sql("SUB"),
            exp.DateTrunc: _date_trunc_sql,
            exp.DayOfWeek: rename_func("DAYOFWEEK"),
            exp.DayOfMonth: rename_func("DAYOFMONTH"),
            exp.DayOfYear: rename_func("DAYOFYEAR"),
            exp.WeekOfYear: rename_func("WEEKOFYEAR"),
            # Default separator is ',' when none was parsed.
            exp.GroupConcat: lambda self, e: f"""GROUP_CONCAT({self.sql(e, "this")} SEPARATOR {self.sql(e, "separator") or "','"})""",
            exp.StrToDate: _str_to_date_sql,
            exp.StrToTime: _str_to_date_sql,
            exp.Trim: _trim_sql,
            # MySQL's null-safe equality operator.
            exp.NullSafeEQ: lambda self, e: self.binary(e, "<=>"),
            exp.NullSafeNEQ: lambda self, e: self.not_sql(self.binary(e, "<=>")),
            exp.StrPosition: strposition_to_locate_sql,
        }

        # These text/blob types are native to MySQL, so drop the generic remapping.
        TYPE_MAPPING = generator.Generator.TYPE_MAPPING.copy()
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMBLOB)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGBLOB)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
            exp.TransientProperty: exp.Properties.Location.UNSUPPORTED,
        }

        LIMIT_FETCH = "LIMIT"

        def show_sql(self, expression: exp.Show) -> str:
            """Render an ``exp.Show`` node back into a MySQL SHOW statement."""
            this = f" {expression.name}"
            full = " FULL" if expression.args.get("full") else ""
            global_ = " GLOBAL" if expression.args.get("global") else ""

            target = self.sql(expression, "target")
            target = f" {target}" if target else ""
            # These variants require a keyword before the target identifier.
            if expression.name in {"COLUMNS", "INDEX"}:
                target = f" FROM{target}"
            elif expression.name == "GRANTS":
                target = f" FOR{target}"

            db = self._prefixed_sql("FROM", expression, "db")

            like = self._prefixed_sql("LIKE", expression, "like")
            where = self.sql(expression, "where")

            types = self.expressions(expression, key="types")
            types = f" {types}" if types else types
            query = self._prefixed_sql("FOR QUERY", expression, "query")

            if expression.name == "PROFILE":
                # PROFILE takes explicit OFFSET/LIMIT clauses.
                offset = self._prefixed_sql("OFFSET", expression, "offset")
                limit = self._prefixed_sql("LIMIT", expression, "limit")
            else:
                offset = ""
                limit = self._oldstyle_limit_sql(expression)

            log = self._prefixed_sql("IN", expression, "log")
            position = self._prefixed_sql("FROM", expression, "position")

            channel = self._prefixed_sql("FOR CHANNEL", expression, "channel")

            if expression.name == "ENGINE":
                mutex_or_status = " MUTEX" if expression.args.get("mutex") else " STATUS"
            else:
                mutex_or_status = ""

            return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"

        def _prefixed_sql(self, prefix: str, expression: exp.Expression, arg: str) -> str:
            """Render ``arg`` preceded by ``prefix``, or "" when it is absent."""
            sql = self.sql(expression, arg)
            if not sql:
                return ""
            return f" {prefix} {sql}"

        def _oldstyle_limit_sql(self, expression: exp.Show) -> str:
            """Render the legacy ``LIMIT [offset,] count`` clause for SHOW."""
            limit = self.sql(expression, "limit")
            offset = self.sql(expression, "offset")
            if limit:
                limit_offset = f"{offset}, {limit}" if offset else limit
                return f" LIMIT {limit_offset}"
            return ""
class MySQL(sqlglot.dialects.dialect.Dialect):
103class MySQL(Dialect):
104    time_format = "'%Y-%m-%d %T'"
105
106    # https://prestodb.io/docs/current/functions/datetime.html#mysql-date-functions
107    time_mapping = {
108        "%M": "%B",
109        "%c": "%-m",
110        "%e": "%-d",
111        "%h": "%I",
112        "%i": "%M",
113        "%s": "%S",
114        "%S": "%S",
115        "%u": "%W",
116        "%k": "%-H",
117        "%l": "%-I",
118        "%T": "%H:%M:%S",
119    }
120
121    class Tokenizer(tokens.Tokenizer):
122        QUOTES = ["'", '"']
123        COMMENTS = ["--", "#", ("/*", "*/")]
124        IDENTIFIERS = ["`"]
125        STRING_ESCAPES = ["'", "\\"]
126        BIT_STRINGS = [("b'", "'"), ("B'", "'"), ("0b", "")]
127        HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", "")]
128
129        KEYWORDS = {
130            **tokens.Tokenizer.KEYWORDS,
131            "MEDIUMTEXT": TokenType.MEDIUMTEXT,
132            "LONGTEXT": TokenType.LONGTEXT,
133            "MEDIUMBLOB": TokenType.MEDIUMBLOB,
134            "LONGBLOB": TokenType.LONGBLOB,
135            "START": TokenType.BEGIN,
136            "SEPARATOR": TokenType.SEPARATOR,
137            "_ARMSCII8": TokenType.INTRODUCER,
138            "_ASCII": TokenType.INTRODUCER,
139            "_BIG5": TokenType.INTRODUCER,
140            "_BINARY": TokenType.INTRODUCER,
141            "_CP1250": TokenType.INTRODUCER,
142            "_CP1251": TokenType.INTRODUCER,
143            "_CP1256": TokenType.INTRODUCER,
144            "_CP1257": TokenType.INTRODUCER,
145            "_CP850": TokenType.INTRODUCER,
146            "_CP852": TokenType.INTRODUCER,
147            "_CP866": TokenType.INTRODUCER,
148            "_CP932": TokenType.INTRODUCER,
149            "_DEC8": TokenType.INTRODUCER,
150            "_EUCJPMS": TokenType.INTRODUCER,
151            "_EUCKR": TokenType.INTRODUCER,
152            "_GB18030": TokenType.INTRODUCER,
153            "_GB2312": TokenType.INTRODUCER,
154            "_GBK": TokenType.INTRODUCER,
155            "_GEOSTD8": TokenType.INTRODUCER,
156            "_GREEK": TokenType.INTRODUCER,
157            "_HEBREW": TokenType.INTRODUCER,
158            "_HP8": TokenType.INTRODUCER,
159            "_KEYBCS2": TokenType.INTRODUCER,
160            "_KOI8R": TokenType.INTRODUCER,
161            "_KOI8U": TokenType.INTRODUCER,
162            "_LATIN1": TokenType.INTRODUCER,
163            "_LATIN2": TokenType.INTRODUCER,
164            "_LATIN5": TokenType.INTRODUCER,
165            "_LATIN7": TokenType.INTRODUCER,
166            "_MACCE": TokenType.INTRODUCER,
167            "_MACROMAN": TokenType.INTRODUCER,
168            "_SJIS": TokenType.INTRODUCER,
169            "_SWE7": TokenType.INTRODUCER,
170            "_TIS620": TokenType.INTRODUCER,
171            "_UCS2": TokenType.INTRODUCER,
172            "_UJIS": TokenType.INTRODUCER,
173            # https://dev.mysql.com/doc/refman/8.0/en/string-literals.html
174            "_UTF8": TokenType.INTRODUCER,
175            "_UTF16": TokenType.INTRODUCER,
176            "_UTF16LE": TokenType.INTRODUCER,
177            "_UTF32": TokenType.INTRODUCER,
178            "_UTF8MB3": TokenType.INTRODUCER,
179            "_UTF8MB4": TokenType.INTRODUCER,
180            "@@": TokenType.SESSION_PARAMETER,
181        }
182
183        COMMANDS = tokens.Tokenizer.COMMANDS - {TokenType.SHOW}
184
185    class Parser(parser.Parser):
186        FUNC_TOKENS = {*parser.Parser.FUNC_TOKENS, TokenType.SCHEMA, TokenType.DATABASE}  # type: ignore
187
188        FUNCTIONS = {
189            **parser.Parser.FUNCTIONS,  # type: ignore
190            "DATE_ADD": _date_add(exp.DateAdd),
191            "DATE_SUB": _date_add(exp.DateSub),
192            "STR_TO_DATE": _str_to_date,
193            "LOCATE": locate_to_strposition,
194            "INSTR": lambda args: exp.StrPosition(substr=seq_get(args, 1), this=seq_get(args, 0)),
195            "LEFT": lambda args: exp.Substring(
196                this=seq_get(args, 0), start=exp.Literal.number(1), length=seq_get(args, 1)
197            ),
198        }
199
200        FUNCTION_PARSERS = {
201            **parser.Parser.FUNCTION_PARSERS,  # type: ignore
202            "GROUP_CONCAT": lambda self: self.expression(
203                exp.GroupConcat,
204                this=self._parse_lambda(),
205                separator=self._match(TokenType.SEPARATOR) and self._parse_field(),
206            ),
207        }
208
209        PROPERTY_PARSERS = {
210            **parser.Parser.PROPERTY_PARSERS,  # type: ignore
211            "ENGINE": lambda self: self._parse_property_assignment(exp.EngineProperty),
212        }
213
214        STATEMENT_PARSERS = {
215            **parser.Parser.STATEMENT_PARSERS,  # type: ignore
216            TokenType.SHOW: lambda self: self._parse_show(),
217        }
218
219        SHOW_PARSERS = {
220            "BINARY LOGS": _show_parser("BINARY LOGS"),
221            "MASTER LOGS": _show_parser("BINARY LOGS"),
222            "BINLOG EVENTS": _show_parser("BINLOG EVENTS"),
223            "CHARACTER SET": _show_parser("CHARACTER SET"),
224            "CHARSET": _show_parser("CHARACTER SET"),
225            "COLLATION": _show_parser("COLLATION"),
226            "FULL COLUMNS": _show_parser("COLUMNS", target="FROM", full=True),
227            "COLUMNS": _show_parser("COLUMNS", target="FROM"),
228            "CREATE DATABASE": _show_parser("CREATE DATABASE", target=True),
229            "CREATE EVENT": _show_parser("CREATE EVENT", target=True),
230            "CREATE FUNCTION": _show_parser("CREATE FUNCTION", target=True),
231            "CREATE PROCEDURE": _show_parser("CREATE PROCEDURE", target=True),
232            "CREATE TABLE": _show_parser("CREATE TABLE", target=True),
233            "CREATE TRIGGER": _show_parser("CREATE TRIGGER", target=True),
234            "CREATE VIEW": _show_parser("CREATE VIEW", target=True),
235            "DATABASES": _show_parser("DATABASES"),
236            "ENGINE": _show_parser("ENGINE", target=True),
237            "STORAGE ENGINES": _show_parser("ENGINES"),
238            "ENGINES": _show_parser("ENGINES"),
239            "ERRORS": _show_parser("ERRORS"),
240            "EVENTS": _show_parser("EVENTS"),
241            "FUNCTION CODE": _show_parser("FUNCTION CODE", target=True),
242            "FUNCTION STATUS": _show_parser("FUNCTION STATUS"),
243            "GRANTS": _show_parser("GRANTS", target="FOR"),
244            "INDEX": _show_parser("INDEX", target="FROM"),
245            "MASTER STATUS": _show_parser("MASTER STATUS"),
246            "OPEN TABLES": _show_parser("OPEN TABLES"),
247            "PLUGINS": _show_parser("PLUGINS"),
248            "PROCEDURE CODE": _show_parser("PROCEDURE CODE", target=True),
249            "PROCEDURE STATUS": _show_parser("PROCEDURE STATUS"),
250            "PRIVILEGES": _show_parser("PRIVILEGES"),
251            "FULL PROCESSLIST": _show_parser("PROCESSLIST", full=True),
252            "PROCESSLIST": _show_parser("PROCESSLIST"),
253            "PROFILE": _show_parser("PROFILE"),
254            "PROFILES": _show_parser("PROFILES"),
255            "RELAYLOG EVENTS": _show_parser("RELAYLOG EVENTS"),
256            "REPLICAS": _show_parser("REPLICAS"),
257            "SLAVE HOSTS": _show_parser("REPLICAS"),
258            "REPLICA STATUS": _show_parser("REPLICA STATUS"),
259            "SLAVE STATUS": _show_parser("REPLICA STATUS"),
260            "GLOBAL STATUS": _show_parser("STATUS", global_=True),
261            "SESSION STATUS": _show_parser("STATUS"),
262            "STATUS": _show_parser("STATUS"),
263            "TABLE STATUS": _show_parser("TABLE STATUS"),
264            "FULL TABLES": _show_parser("TABLES", full=True),
265            "TABLES": _show_parser("TABLES"),
266            "TRIGGERS": _show_parser("TRIGGERS"),
267            "GLOBAL VARIABLES": _show_parser("VARIABLES", global_=True),
268            "SESSION VARIABLES": _show_parser("VARIABLES"),
269            "VARIABLES": _show_parser("VARIABLES"),
270            "WARNINGS": _show_parser("WARNINGS"),
271        }
272
273        SET_PARSERS = {
274            **parser.Parser.SET_PARSERS,
275            "PERSIST": lambda self: self._parse_set_item_assignment("PERSIST"),
276            "PERSIST_ONLY": lambda self: self._parse_set_item_assignment("PERSIST_ONLY"),
277            "CHARACTER SET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
278            "CHARSET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
279            "NAMES": lambda self: self._parse_set_item_names(),
280        }
281
282        PROFILE_TYPES = {
283            "ALL",
284            "BLOCK IO",
285            "CONTEXT SWITCHES",
286            "CPU",
287            "IPC",
288            "MEMORY",
289            "PAGE FAULTS",
290            "SOURCE",
291            "SWAPS",
292        }
293
294        LOG_DEFAULTS_TO_LN = True
295
296        def _parse_show_mysql(self, this, target=False, full=None, global_=None):
297            if target:
298                if isinstance(target, str):
299                    self._match_text_seq(target)
300                target_id = self._parse_id_var()
301            else:
302                target_id = None
303
304            log = self._parse_string() if self._match_text_seq("IN") else None
305
306            if this in {"BINLOG EVENTS", "RELAYLOG EVENTS"}:
307                position = self._parse_number() if self._match_text_seq("FROM") else None
308                db = None
309            else:
310                position = None
311                db = None
312
313                if self._match(TokenType.FROM):
314                    db = self._parse_id_var()
315                elif self._match(TokenType.DOT):
316                    db = target_id
317                    target_id = self._parse_id_var()
318
319            channel = self._parse_id_var() if self._match_text_seq("FOR", "CHANNEL") else None
320
321            like = self._parse_string() if self._match_text_seq("LIKE") else None
322            where = self._parse_where()
323
324            if this == "PROFILE":
325                types = self._parse_csv(lambda: self._parse_var_from_options(self.PROFILE_TYPES))
326                query = self._parse_number() if self._match_text_seq("FOR", "QUERY") else None
327                offset = self._parse_number() if self._match_text_seq("OFFSET") else None
328                limit = self._parse_number() if self._match_text_seq("LIMIT") else None
329            else:
330                types, query = None, None
331                offset, limit = self._parse_oldstyle_limit()
332
333            mutex = True if self._match_text_seq("MUTEX") else None
334            mutex = False if self._match_text_seq("STATUS") else mutex
335
336            return self.expression(
337                exp.Show,
338                this=this,
339                target=target_id,
340                full=full,
341                log=log,
342                position=position,
343                db=db,
344                channel=channel,
345                like=like,
346                where=where,
347                types=types,
348                query=query,
349                offset=offset,
350                limit=limit,
351                mutex=mutex,
352                **{"global": global_},
353            )
354
355        def _parse_oldstyle_limit(self):
356            limit = None
357            offset = None
358            if self._match_text_seq("LIMIT"):
359                parts = self._parse_csv(self._parse_number)
360                if len(parts) == 1:
361                    limit = parts[0]
362                elif len(parts) == 2:
363                    limit = parts[1]
364                    offset = parts[0]
365            return offset, limit
366
367        def _parse_set_item_charset(self, kind):
368            this = self._parse_string() or self._parse_id_var()
369
370            return self.expression(
371                exp.SetItem,
372                this=this,
373                kind=kind,
374            )
375
376        def _parse_set_item_names(self):
377            charset = self._parse_string() or self._parse_id_var()
378            if self._match_text_seq("COLLATE"):
379                collate = self._parse_string() or self._parse_id_var()
380            else:
381                collate = None
382            return self.expression(
383                exp.SetItem,
384                this=charset,
385                collate=collate,
386                kind="NAMES",
387            )
388
389    class Generator(generator.Generator):
390        LOCKING_READS_SUPPORTED = True
391        NULL_ORDERING_SUPPORTED = False
392
393        TRANSFORMS = {
394            **generator.Generator.TRANSFORMS,  # type: ignore
395            exp.CurrentDate: no_paren_current_date_sql,
396            exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
397            exp.ILike: no_ilike_sql,
398            exp.JSONExtractScalar: arrow_json_extract_scalar_sql,
399            exp.Max: max_or_greatest,
400            exp.Min: min_or_least,
401            exp.TableSample: no_tablesample_sql,
402            exp.TryCast: no_trycast_sql,
403            exp.DateAdd: _date_add_sql("ADD"),
404            exp.DateDiff: lambda self, e: f"DATEDIFF({self.format_args(e.this, e.expression)})",
405            exp.DateSub: _date_add_sql("SUB"),
406            exp.DateTrunc: _date_trunc_sql,
407            exp.DayOfWeek: rename_func("DAYOFWEEK"),
408            exp.DayOfMonth: rename_func("DAYOFMONTH"),
409            exp.DayOfYear: rename_func("DAYOFYEAR"),
410            exp.WeekOfYear: rename_func("WEEKOFYEAR"),
411            exp.GroupConcat: lambda self, e: f"""GROUP_CONCAT({self.sql(e, "this")} SEPARATOR {self.sql(e, "separator") or "','"})""",
412            exp.StrToDate: _str_to_date_sql,
413            exp.StrToTime: _str_to_date_sql,
414            exp.Trim: _trim_sql,
415            exp.NullSafeEQ: lambda self, e: self.binary(e, "<=>"),
416            exp.NullSafeNEQ: lambda self, e: self.not_sql(self.binary(e, "<=>")),
417            exp.StrPosition: strposition_to_locate_sql,
418        }
419
420        TYPE_MAPPING = generator.Generator.TYPE_MAPPING.copy()
421        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMTEXT)
422        TYPE_MAPPING.pop(exp.DataType.Type.LONGTEXT)
423        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMBLOB)
424        TYPE_MAPPING.pop(exp.DataType.Type.LONGBLOB)
425
426        PROPERTIES_LOCATION = {
427            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
428            exp.TransientProperty: exp.Properties.Location.UNSUPPORTED,
429        }
430
431        LIMIT_FETCH = "LIMIT"
432
433        def show_sql(self, expression: exp.Show) -> str:
434            this = f" {expression.name}"
435            full = " FULL" if expression.args.get("full") else ""
436            global_ = " GLOBAL" if expression.args.get("global") else ""
437
438            target = self.sql(expression, "target")
439            target = f" {target}" if target else ""
440            if expression.name in {"COLUMNS", "INDEX"}:
441                target = f" FROM{target}"
442            elif expression.name == "GRANTS":
443                target = f" FOR{target}"
444
445            db = self._prefixed_sql("FROM", expression, "db")
446
447            like = self._prefixed_sql("LIKE", expression, "like")
448            where = self.sql(expression, "where")
449
450            types = self.expressions(expression, key="types")
451            types = f" {types}" if types else types
452            query = self._prefixed_sql("FOR QUERY", expression, "query")
453
454            if expression.name == "PROFILE":
455                offset = self._prefixed_sql("OFFSET", expression, "offset")
456                limit = self._prefixed_sql("LIMIT", expression, "limit")
457            else:
458                offset = ""
459                limit = self._oldstyle_limit_sql(expression)
460
461            log = self._prefixed_sql("IN", expression, "log")
462            position = self._prefixed_sql("FROM", expression, "position")
463
464            channel = self._prefixed_sql("FOR CHANNEL", expression, "channel")
465
466            if expression.name == "ENGINE":
467                mutex_or_status = " MUTEX" if expression.args.get("mutex") else " STATUS"
468            else:
469                mutex_or_status = ""
470
471            return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"
472
473        def _prefixed_sql(self, prefix: str, expression: exp.Expression, arg: str) -> str:
474            sql = self.sql(expression, arg)
475            if not sql:
476                return ""
477            return f" {prefix} {sql}"
478
479        def _oldstyle_limit_sql(self, expression: exp.Show) -> str:
480            limit = self.sql(expression, "limit")
481            offset = self.sql(expression, "offset")
482            if limit:
483                limit_offset = f"{offset}, {limit}" if offset else limit
484                return f" LIMIT {limit_offset}"
485            return ""
class MySQL.Tokenizer(sqlglot.tokens.Tokenizer):
121    class Tokenizer(tokens.Tokenizer):
122        QUOTES = ["'", '"']
123        COMMENTS = ["--", "#", ("/*", "*/")]
124        IDENTIFIERS = ["`"]
125        STRING_ESCAPES = ["'", "\\"]
126        BIT_STRINGS = [("b'", "'"), ("B'", "'"), ("0b", "")]
127        HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", "")]
128
129        KEYWORDS = {
130            **tokens.Tokenizer.KEYWORDS,
131            "MEDIUMTEXT": TokenType.MEDIUMTEXT,
132            "LONGTEXT": TokenType.LONGTEXT,
133            "MEDIUMBLOB": TokenType.MEDIUMBLOB,
134            "LONGBLOB": TokenType.LONGBLOB,
135            "START": TokenType.BEGIN,
136            "SEPARATOR": TokenType.SEPARATOR,
137            "_ARMSCII8": TokenType.INTRODUCER,
138            "_ASCII": TokenType.INTRODUCER,
139            "_BIG5": TokenType.INTRODUCER,
140            "_BINARY": TokenType.INTRODUCER,
141            "_CP1250": TokenType.INTRODUCER,
142            "_CP1251": TokenType.INTRODUCER,
143            "_CP1256": TokenType.INTRODUCER,
144            "_CP1257": TokenType.INTRODUCER,
145            "_CP850": TokenType.INTRODUCER,
146            "_CP852": TokenType.INTRODUCER,
147            "_CP866": TokenType.INTRODUCER,
148            "_CP932": TokenType.INTRODUCER,
149            "_DEC8": TokenType.INTRODUCER,
150            "_EUCJPMS": TokenType.INTRODUCER,
151            "_EUCKR": TokenType.INTRODUCER,
152            "_GB18030": TokenType.INTRODUCER,
153            "_GB2312": TokenType.INTRODUCER,
154            "_GBK": TokenType.INTRODUCER,
155            "_GEOSTD8": TokenType.INTRODUCER,
156            "_GREEK": TokenType.INTRODUCER,
157            "_HEBREW": TokenType.INTRODUCER,
158            "_HP8": TokenType.INTRODUCER,
159            "_KEYBCS2": TokenType.INTRODUCER,
160            "_KOI8R": TokenType.INTRODUCER,
161            "_KOI8U": TokenType.INTRODUCER,
162            "_LATIN1": TokenType.INTRODUCER,
163            "_LATIN2": TokenType.INTRODUCER,
164            "_LATIN5": TokenType.INTRODUCER,
165            "_LATIN7": TokenType.INTRODUCER,
166            "_MACCE": TokenType.INTRODUCER,
167            "_MACROMAN": TokenType.INTRODUCER,
168            "_SJIS": TokenType.INTRODUCER,
169            "_SWE7": TokenType.INTRODUCER,
170            "_TIS620": TokenType.INTRODUCER,
171            "_UCS2": TokenType.INTRODUCER,
172            "_UJIS": TokenType.INTRODUCER,
173            # https://dev.mysql.com/doc/refman/8.0/en/string-literals.html
174            "_UTF8": TokenType.INTRODUCER,
175            "_UTF16": TokenType.INTRODUCER,
176            "_UTF16LE": TokenType.INTRODUCER,
177            "_UTF32": TokenType.INTRODUCER,
178            "_UTF8MB3": TokenType.INTRODUCER,
179            "_UTF8MB4": TokenType.INTRODUCER,
180            "@@": TokenType.SESSION_PARAMETER,
181        }
182
183        COMMANDS = tokens.Tokenizer.COMMANDS - {TokenType.SHOW}
class MySQL.Parser(sqlglot.parser.Parser):
185    class Parser(parser.Parser):
186        FUNC_TOKENS = {*parser.Parser.FUNC_TOKENS, TokenType.SCHEMA, TokenType.DATABASE}  # type: ignore
187
188        FUNCTIONS = {
189            **parser.Parser.FUNCTIONS,  # type: ignore
190            "DATE_ADD": _date_add(exp.DateAdd),
191            "DATE_SUB": _date_add(exp.DateSub),
192            "STR_TO_DATE": _str_to_date,
193            "LOCATE": locate_to_strposition,
194            "INSTR": lambda args: exp.StrPosition(substr=seq_get(args, 1), this=seq_get(args, 0)),
195            "LEFT": lambda args: exp.Substring(
196                this=seq_get(args, 0), start=exp.Literal.number(1), length=seq_get(args, 1)
197            ),
198        }
199
200        FUNCTION_PARSERS = {
201            **parser.Parser.FUNCTION_PARSERS,  # type: ignore
202            "GROUP_CONCAT": lambda self: self.expression(
203                exp.GroupConcat,
204                this=self._parse_lambda(),
205                separator=self._match(TokenType.SEPARATOR) and self._parse_field(),
206            ),
207        }
208
209        PROPERTY_PARSERS = {
210            **parser.Parser.PROPERTY_PARSERS,  # type: ignore
211            "ENGINE": lambda self: self._parse_property_assignment(exp.EngineProperty),
212        }
213
214        STATEMENT_PARSERS = {
215            **parser.Parser.STATEMENT_PARSERS,  # type: ignore
216            TokenType.SHOW: lambda self: self._parse_show(),
217        }
218
219        SHOW_PARSERS = {
220            "BINARY LOGS": _show_parser("BINARY LOGS"),
221            "MASTER LOGS": _show_parser("BINARY LOGS"),
222            "BINLOG EVENTS": _show_parser("BINLOG EVENTS"),
223            "CHARACTER SET": _show_parser("CHARACTER SET"),
224            "CHARSET": _show_parser("CHARACTER SET"),
225            "COLLATION": _show_parser("COLLATION"),
226            "FULL COLUMNS": _show_parser("COLUMNS", target="FROM", full=True),
227            "COLUMNS": _show_parser("COLUMNS", target="FROM"),
228            "CREATE DATABASE": _show_parser("CREATE DATABASE", target=True),
229            "CREATE EVENT": _show_parser("CREATE EVENT", target=True),
230            "CREATE FUNCTION": _show_parser("CREATE FUNCTION", target=True),
231            "CREATE PROCEDURE": _show_parser("CREATE PROCEDURE", target=True),
232            "CREATE TABLE": _show_parser("CREATE TABLE", target=True),
233            "CREATE TRIGGER": _show_parser("CREATE TRIGGER", target=True),
234            "CREATE VIEW": _show_parser("CREATE VIEW", target=True),
235            "DATABASES": _show_parser("DATABASES"),
236            "ENGINE": _show_parser("ENGINE", target=True),
237            "STORAGE ENGINES": _show_parser("ENGINES"),
238            "ENGINES": _show_parser("ENGINES"),
239            "ERRORS": _show_parser("ERRORS"),
240            "EVENTS": _show_parser("EVENTS"),
241            "FUNCTION CODE": _show_parser("FUNCTION CODE", target=True),
242            "FUNCTION STATUS": _show_parser("FUNCTION STATUS"),
243            "GRANTS": _show_parser("GRANTS", target="FOR"),
244            "INDEX": _show_parser("INDEX", target="FROM"),
245            "MASTER STATUS": _show_parser("MASTER STATUS"),
246            "OPEN TABLES": _show_parser("OPEN TABLES"),
247            "PLUGINS": _show_parser("PLUGINS"),
248            "PROCEDURE CODE": _show_parser("PROCEDURE CODE", target=True),
249            "PROCEDURE STATUS": _show_parser("PROCEDURE STATUS"),
250            "PRIVILEGES": _show_parser("PRIVILEGES"),
251            "FULL PROCESSLIST": _show_parser("PROCESSLIST", full=True),
252            "PROCESSLIST": _show_parser("PROCESSLIST"),
253            "PROFILE": _show_parser("PROFILE"),
254            "PROFILES": _show_parser("PROFILES"),
255            "RELAYLOG EVENTS": _show_parser("RELAYLOG EVENTS"),
256            "REPLICAS": _show_parser("REPLICAS"),
257            "SLAVE HOSTS": _show_parser("REPLICAS"),
258            "REPLICA STATUS": _show_parser("REPLICA STATUS"),
259            "SLAVE STATUS": _show_parser("REPLICA STATUS"),
260            "GLOBAL STATUS": _show_parser("STATUS", global_=True),
261            "SESSION STATUS": _show_parser("STATUS"),
262            "STATUS": _show_parser("STATUS"),
263            "TABLE STATUS": _show_parser("TABLE STATUS"),
264            "FULL TABLES": _show_parser("TABLES", full=True),
265            "TABLES": _show_parser("TABLES"),
266            "TRIGGERS": _show_parser("TRIGGERS"),
267            "GLOBAL VARIABLES": _show_parser("VARIABLES", global_=True),
268            "SESSION VARIABLES": _show_parser("VARIABLES"),
269            "VARIABLES": _show_parser("VARIABLES"),
270            "WARNINGS": _show_parser("WARNINGS"),
271        }
272
273        SET_PARSERS = {
274            **parser.Parser.SET_PARSERS,
275            "PERSIST": lambda self: self._parse_set_item_assignment("PERSIST"),
276            "PERSIST_ONLY": lambda self: self._parse_set_item_assignment("PERSIST_ONLY"),
277            "CHARACTER SET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
278            "CHARSET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
279            "NAMES": lambda self: self._parse_set_item_names(),
280        }
281
282        PROFILE_TYPES = {
283            "ALL",
284            "BLOCK IO",
285            "CONTEXT SWITCHES",
286            "CPU",
287            "IPC",
288            "MEMORY",
289            "PAGE FAULTS",
290            "SOURCE",
291            "SWAPS",
292        }
293
294        LOG_DEFAULTS_TO_LN = True
295
296        def _parse_show_mysql(self, this, target=False, full=None, global_=None):
297            if target:
298                if isinstance(target, str):
299                    self._match_text_seq(target)
300                target_id = self._parse_id_var()
301            else:
302                target_id = None
303
304            log = self._parse_string() if self._match_text_seq("IN") else None
305
306            if this in {"BINLOG EVENTS", "RELAYLOG EVENTS"}:
307                position = self._parse_number() if self._match_text_seq("FROM") else None
308                db = None
309            else:
310                position = None
311                db = None
312
313                if self._match(TokenType.FROM):
314                    db = self._parse_id_var()
315                elif self._match(TokenType.DOT):
316                    db = target_id
317                    target_id = self._parse_id_var()
318
319            channel = self._parse_id_var() if self._match_text_seq("FOR", "CHANNEL") else None
320
321            like = self._parse_string() if self._match_text_seq("LIKE") else None
322            where = self._parse_where()
323
324            if this == "PROFILE":
325                types = self._parse_csv(lambda: self._parse_var_from_options(self.PROFILE_TYPES))
326                query = self._parse_number() if self._match_text_seq("FOR", "QUERY") else None
327                offset = self._parse_number() if self._match_text_seq("OFFSET") else None
328                limit = self._parse_number() if self._match_text_seq("LIMIT") else None
329            else:
330                types, query = None, None
331                offset, limit = self._parse_oldstyle_limit()
332
333            mutex = True if self._match_text_seq("MUTEX") else None
334            mutex = False if self._match_text_seq("STATUS") else mutex
335
336            return self.expression(
337                exp.Show,
338                this=this,
339                target=target_id,
340                full=full,
341                log=log,
342                position=position,
343                db=db,
344                channel=channel,
345                like=like,
346                where=where,
347                types=types,
348                query=query,
349                offset=offset,
350                limit=limit,
351                mutex=mutex,
352                **{"global": global_},
353            )
354
355        def _parse_oldstyle_limit(self):
356            limit = None
357            offset = None
358            if self._match_text_seq("LIMIT"):
359                parts = self._parse_csv(self._parse_number)
360                if len(parts) == 1:
361                    limit = parts[0]
362                elif len(parts) == 2:
363                    limit = parts[1]
364                    offset = parts[0]
365            return offset, limit
366
367        def _parse_set_item_charset(self, kind):
368            this = self._parse_string() or self._parse_id_var()
369
370            return self.expression(
371                exp.SetItem,
372                this=this,
373                kind=kind,
374            )
375
376        def _parse_set_item_names(self):
377            charset = self._parse_string() or self._parse_id_var()
378            if self._match_text_seq("COLLATE"):
379                collate = self._parse_string() or self._parse_id_var()
380            else:
381                collate = None
382            return self.expression(
383                exp.SetItem,
384                this=charset,
385                collate=collate,
386                kind="NAMES",
387            )

Parser consumes a list of tokens produced by the sqlglot.tokens.Tokenizer and produces a parsed syntax tree.

Arguments:
  • error_level: the desired error level. Default: ErrorLevel.RAISE
  • error_message_context: determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 50.
  • index_offset: Index offset for arrays, e.g. ARRAY[0] vs ARRAY[1] as the head of a list. Default: 0
  • alias_post_tablesample: If the table alias comes after tablesample. Default: False
  • max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
  • null_ordering: Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
class MySQL.Generator(sqlglot.generator.Generator):
389    class Generator(generator.Generator):
390        LOCKING_READS_SUPPORTED = True
391        NULL_ORDERING_SUPPORTED = False
392
393        TRANSFORMS = {
394            **generator.Generator.TRANSFORMS,  # type: ignore
395            exp.CurrentDate: no_paren_current_date_sql,
396            exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
397            exp.ILike: no_ilike_sql,
398            exp.JSONExtractScalar: arrow_json_extract_scalar_sql,
399            exp.Max: max_or_greatest,
400            exp.Min: min_or_least,
401            exp.TableSample: no_tablesample_sql,
402            exp.TryCast: no_trycast_sql,
403            exp.DateAdd: _date_add_sql("ADD"),
404            exp.DateDiff: lambda self, e: f"DATEDIFF({self.format_args(e.this, e.expression)})",
405            exp.DateSub: _date_add_sql("SUB"),
406            exp.DateTrunc: _date_trunc_sql,
407            exp.DayOfWeek: rename_func("DAYOFWEEK"),
408            exp.DayOfMonth: rename_func("DAYOFMONTH"),
409            exp.DayOfYear: rename_func("DAYOFYEAR"),
410            exp.WeekOfYear: rename_func("WEEKOFYEAR"),
411            exp.GroupConcat: lambda self, e: f"""GROUP_CONCAT({self.sql(e, "this")} SEPARATOR {self.sql(e, "separator") or "','"})""",
412            exp.StrToDate: _str_to_date_sql,
413            exp.StrToTime: _str_to_date_sql,
414            exp.Trim: _trim_sql,
415            exp.NullSafeEQ: lambda self, e: self.binary(e, "<=>"),
416            exp.NullSafeNEQ: lambda self, e: self.not_sql(self.binary(e, "<=>")),
417            exp.StrPosition: strposition_to_locate_sql,
418        }
419
420        TYPE_MAPPING = generator.Generator.TYPE_MAPPING.copy()
421        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMTEXT)
422        TYPE_MAPPING.pop(exp.DataType.Type.LONGTEXT)
423        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMBLOB)
424        TYPE_MAPPING.pop(exp.DataType.Type.LONGBLOB)
425
426        PROPERTIES_LOCATION = {
427            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
428            exp.TransientProperty: exp.Properties.Location.UNSUPPORTED,
429        }
430
431        LIMIT_FETCH = "LIMIT"
432
433        def show_sql(self, expression: exp.Show) -> str:
434            this = f" {expression.name}"
435            full = " FULL" if expression.args.get("full") else ""
436            global_ = " GLOBAL" if expression.args.get("global") else ""
437
438            target = self.sql(expression, "target")
439            target = f" {target}" if target else ""
440            if expression.name in {"COLUMNS", "INDEX"}:
441                target = f" FROM{target}"
442            elif expression.name == "GRANTS":
443                target = f" FOR{target}"
444
445            db = self._prefixed_sql("FROM", expression, "db")
446
447            like = self._prefixed_sql("LIKE", expression, "like")
448            where = self.sql(expression, "where")
449
450            types = self.expressions(expression, key="types")
451            types = f" {types}" if types else types
452            query = self._prefixed_sql("FOR QUERY", expression, "query")
453
454            if expression.name == "PROFILE":
455                offset = self._prefixed_sql("OFFSET", expression, "offset")
456                limit = self._prefixed_sql("LIMIT", expression, "limit")
457            else:
458                offset = ""
459                limit = self._oldstyle_limit_sql(expression)
460
461            log = self._prefixed_sql("IN", expression, "log")
462            position = self._prefixed_sql("FROM", expression, "position")
463
464            channel = self._prefixed_sql("FOR CHANNEL", expression, "channel")
465
466            if expression.name == "ENGINE":
467                mutex_or_status = " MUTEX" if expression.args.get("mutex") else " STATUS"
468            else:
469                mutex_or_status = ""
470
471            return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"
472
473        def _prefixed_sql(self, prefix: str, expression: exp.Expression, arg: str) -> str:
474            sql = self.sql(expression, arg)
475            if not sql:
476                return ""
477            return f" {prefix} {sql}"
478
479        def _oldstyle_limit_sql(self, expression: exp.Show) -> str:
480            limit = self.sql(expression, "limit")
481            offset = self.sql(expression, "offset")
482            if limit:
483                limit_offset = f"{offset}, {limit}" if offset else limit
484                return f" LIMIT {limit_offset}"
485            return ""

Generator interprets the given syntax tree and produces a SQL string as an output.

Arguments:
  • time_mapping (dict): the dictionary of custom time mappings in which the key represents a python time format and the output the target time format
  • time_trie (trie): a trie of the time_mapping keys
  • pretty (bool): if set to True the returned string will be formatted. Default: False.
  • quote_start (str): specifies which starting character to use to delimit quotes. Default: '.
  • quote_end (str): specifies which ending character to use to delimit quotes. Default: '.
  • identifier_start (str): specifies which starting character to use to delimit identifiers. Default: ".
  • identifier_end (str): specifies which ending character to use to delimit identifiers. Default: ".
  • identify (bool | str): 'always': always quote, 'safe': quote identifiers if they don't contain an uppercase character, True defaults to always.
  • normalize (bool): if set to True all identifiers will be lowercased
  • string_escape (str): specifies a string escape character. Default: '.
  • identifier_escape (str): specifies an identifier escape character. Default: ".
  • pad (int): determines padding in a formatted string. Default: 2.
  • indent (int): determines the size of indentation in a formatted string. Default: 4.
  • unnest_column_only (bool): if true unnest table aliases are considered only as column aliases
  • normalize_functions (str): normalize function names, "upper", "lower", or None Default: "upper"
  • alias_post_tablesample (bool): if the table alias comes after tablesample Default: False
  • unsupported_level (ErrorLevel): determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
  • null_ordering (str): Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
  • max_unsupported (int): Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
  • leading_comma (bool): if the comma is leading or trailing in select statements. Default: False
  • max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
  • comments: Whether or not to preserve comments in the output SQL code. Default: True
def show_sql(self, expression: sqlglot.expressions.Show) -> str:
433        def show_sql(self, expression: exp.Show) -> str:
434            this = f" {expression.name}"
435            full = " FULL" if expression.args.get("full") else ""
436            global_ = " GLOBAL" if expression.args.get("global") else ""
437
438            target = self.sql(expression, "target")
439            target = f" {target}" if target else ""
440            if expression.name in {"COLUMNS", "INDEX"}:
441                target = f" FROM{target}"
442            elif expression.name == "GRANTS":
443                target = f" FOR{target}"
444
445            db = self._prefixed_sql("FROM", expression, "db")
446
447            like = self._prefixed_sql("LIKE", expression, "like")
448            where = self.sql(expression, "where")
449
450            types = self.expressions(expression, key="types")
451            types = f" {types}" if types else types
452            query = self._prefixed_sql("FOR QUERY", expression, "query")
453
454            if expression.name == "PROFILE":
455                offset = self._prefixed_sql("OFFSET", expression, "offset")
456                limit = self._prefixed_sql("LIMIT", expression, "limit")
457            else:
458                offset = ""
459                limit = self._oldstyle_limit_sql(expression)
460
461            log = self._prefixed_sql("IN", expression, "log")
462            position = self._prefixed_sql("FROM", expression, "position")
463
464            channel = self._prefixed_sql("FOR CHANNEL", expression, "channel")
465
466            if expression.name == "ENGINE":
467                mutex_or_status = " MUTEX" if expression.args.get("mutex") else " STATUS"
468            else:
469                mutex_or_status = ""
470
471            return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"
Inherited Members
sqlglot.generator.Generator
Generator
generate
unsupported
sep
seg
pad_comment
maybe_comment
wrap
no_identify
normalize_func
indent
sql
uncache_sql
cache_sql
characterset_sql
column_sql
columnposition_sql
columndef_sql
columnconstraint_sql
autoincrementcolumnconstraint_sql
compresscolumnconstraint_sql
generatedasidentitycolumnconstraint_sql
notnullcolumnconstraint_sql
primarykeycolumnconstraint_sql
uniquecolumnconstraint_sql
create_sql
describe_sql
prepend_ctes
with_sql
cte_sql
tablealias_sql
bitstring_sql
hexstring_sql
datatype_sql
directory_sql
delete_sql
drop_sql
except_sql
except_op
fetch_sql
filter_sql
hint_sql
index_sql
identifier_sql
national_sql
partition_sql
properties_sql
root_properties
properties
with_properties
locate_properties
property_sql
likeproperty_sql
fallbackproperty_sql
journalproperty_sql
freespaceproperty_sql
afterjournalproperty_sql
checksumproperty_sql
mergeblockratioproperty_sql
datablocksizeproperty_sql
blockcompressionproperty_sql
isolatedloadingproperty_sql
lockingproperty_sql
withdataproperty_sql
insert_sql
intersect_sql
intersect_op
introducer_sql
pseudotype_sql
returning_sql
rowformatdelimitedproperty_sql
table_sql
tablesample_sql
pivot_sql
tuple_sql
update_sql
values_sql
var_sql
into_sql
from_sql
group_sql
having_sql
join_sql
lambda_sql
lateral_sql
limit_sql
offset_sql
setitem_sql
set_sql
pragma_sql
lock_sql
literal_sql
loaddata_sql
null_sql
boolean_sql
order_sql
cluster_sql
distribute_sql
sort_sql
ordered_sql
matchrecognize_sql
query_modifiers
select_sql
schema_sql
star_sql
structkwarg_sql
parameter_sql
sessionparameter_sql
placeholder_sql
subquery_sql
qualify_sql
union_sql
union_op
unnest_sql
where_sql
window_sql
partition_by_sql
window_spec_sql
withingroup_sql
between_sql
bracket_sql
all_sql
any_sql
exists_sql
case_sql
constraint_sql
extract_sql
trim_sql
concat_sql
check_sql
foreignkey_sql
primarykey_sql
unique_sql
if_sql
matchagainst_sql
jsonkeyvalue_sql
jsonobject_sql
in_sql
in_unnest_op
interval_sql
return_sql
reference_sql
anonymous_sql
paren_sql
neg_sql
not_sql
alias_sql
aliases_sql
attimezone_sql
add_sql
and_sql
connector_sql
bitwiseand_sql
bitwiseleftshift_sql
bitwisenot_sql
bitwiseor_sql
bitwiserightshift_sql
bitwisexor_sql
cast_sql
currentdate_sql
collate_sql
command_sql
comment_sql
transaction_sql
commit_sql
rollback_sql
altercolumn_sql
renametable_sql
altertable_sql
droppartition_sql
addconstraint_sql
distinct_sql
ignorenulls_sql
respectnulls_sql
intdiv_sql
dpipe_sql
div_sql
overlaps_sql
distance_sql
dot_sql
eq_sql
escape_sql
glob_sql
gt_sql
gte_sql
ilike_sql
ilikeany_sql
is_sql
like_sql
likeany_sql
similarto_sql
lt_sql
lte_sql
mod_sql
mul_sql
neq_sql
nullsafeeq_sql
nullsafeneq_sql
or_sql
slice_sql
sub_sql
trycast_sql
use_sql
binary
function_fallback_sql
func
format_args
text_width
format_time
expressions
op_expressions
naked_property
set_operation
tag_sql
token_sql
userdefinedfunction_sql
joinhint_sql
kwarg_sql
when_sql
merge_sql
tochar_sql