sqlglot.dialects.mysql
"""MySQL dialect for sqlglot: tokenizer, parser, and SQL generator overrides."""
from __future__ import annotations

from sqlglot import exp, generator, parser, tokens
from sqlglot.dialects.dialect import (
    Dialect,
    locate_to_strposition,
    no_ilike_sql,
    no_paren_current_date_sql,
    no_tablesample_sql,
    no_trycast_sql,
    strposition_to_locate_sql,
)
from sqlglot.helper import seq_get
from sqlglot.tokens import TokenType


def _show_parser(*args, **kwargs):
    """Return a parser callback that delegates to ``_parse_show_mysql``.

    Used to populate ``SHOW_PARSERS`` so each SHOW variant shares one
    implementation, parameterized by the statement kind and options.
    """

    def _parse(self):
        return self._parse_show_mysql(*args, **kwargs)

    return _parse


def _date_trunc_sql(self, expression):
    """Render ``DATE_TRUNC`` for MySQL, which has no native equivalent.

    Truncation is emulated by rebuilding a date string at the desired
    granularity and parsing it back with STR_TO_DATE; ``day`` collapses to
    a plain DATE() call.
    """
    unit = expression.name.lower()

    expr = self.sql(expression.expression)

    if unit == "day":
        return f"DATE({expr})"

    if unit == "week":
        concat = f"CONCAT(YEAR({expr}), ' ', WEEK({expr}, 1), ' 1')"
        date_format = "%Y %u %w"
    elif unit == "month":
        concat = f"CONCAT(YEAR({expr}), ' ', MONTH({expr}), ' 1')"
        date_format = "%Y %c %e"
    elif unit == "quarter":
        # First month of the quarter: quarter * 3 - 2 maps 1..4 -> 1, 4, 7, 10.
        concat = f"CONCAT(YEAR({expr}), ' ', QUARTER({expr}) * 3 - 2, ' 1')"
        date_format = "%Y %c %e"
    elif unit == "year":
        concat = f"CONCAT(YEAR({expr}), ' 1 1')"
        date_format = "%Y %c %e"
    else:
        # BUG FIX: the original string lacked the f-prefix, so the warning
        # showed the literal text "{unit}" instead of the offending unit.
        self.unsupported(f"Unexpected interval unit: {unit}")
        return f"DATE({expr})"

    return f"STR_TO_DATE({concat}, '{date_format}')"


def _str_to_date(args):
    """Parse MySQL ``STR_TO_DATE(value, format)`` into exp.StrToDate,
    normalizing the format string through the dialect's time mapping."""
    date_format = MySQL.format_time(seq_get(args, 1))
    return exp.StrToDate(this=seq_get(args, 0), format=date_format)


def _str_to_date_sql(self, expression):
    """Render exp.StrToDate / exp.StrToTime back to ``STR_TO_DATE``."""
    date_format = self.format_time(expression)
    return f"STR_TO_DATE({self.sql(expression.this)}, {date_format})"


def _trim_sql(self, expression):
    """Render TRIM, using MySQL's ``TRIM(kind chars FROM target)`` form
    only when a removal-character argument is present."""
    target = self.sql(expression, "this")
    trim_type = self.sql(expression, "position")
    remove_chars = self.sql(expression, "expression")

    # Use TRIM/LTRIM/RTRIM syntax if the expression isn't mysql-specific
    if not remove_chars:
        return self.trim_sql(expression)

    trim_type = f"{trim_type} " if trim_type else ""
    remove_chars = f"{remove_chars} " if remove_chars else ""
    from_part = "FROM " if trim_type or remove_chars else ""
    return f"TRIM({trim_type}{remove_chars}{from_part}{target})"


def _date_add(expression_class):
    """Return a builder that parses DATE_ADD/DATE_SUB arguments into the
    given expression class, lower-casing the interval unit."""

    def func(args):
        interval = seq_get(args, 1)
        return expression_class(
            this=seq_get(args, 0),
            expression=interval.this,
            unit=exp.Literal.string(interval.text("unit").lower()),
        )

    return func


def _date_add_sql(kind):
    """Return a generator function for ``DATE_ADD``/``DATE_SUB`` (``kind``
    is "ADD" or "SUB"); the unit defaults to DAY when absent."""

    def func(self, expression):
        this = self.sql(expression, "this")
        unit = expression.text("unit").upper() or "DAY"
        return (
            f"DATE_{kind}({this}, {self.sql(exp.Interval(this=expression.expression, unit=unit))})"
        )

    return func


class MySQL(Dialect):
    """The MySQL dialect."""

    time_format = "'%Y-%m-%d %T'"

    # https://prestodb.io/docs/current/functions/datetime.html#mysql-date-functions
    time_mapping = {
        "%M": "%B",
        "%c": "%-m",
        "%e": "%-d",
        "%h": "%I",
        "%i": "%M",
        "%s": "%S",
        "%S": "%S",
        "%u": "%W",
        "%k": "%-H",
        "%l": "%-I",
        "%T": "%H:%M:%S",
    }

    class Tokenizer(tokens.Tokenizer):
        QUOTES = ["'", '"']
        COMMENTS = ["--", "#", ("/*", "*/")]
        IDENTIFIERS = ["`"]
        STRING_ESCAPES = ["'", "\\"]
        BIT_STRINGS = [("b'", "'"), ("B'", "'"), ("0b", "")]
        HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", "")]

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "MEDIUMTEXT": TokenType.MEDIUMTEXT,
            "LONGTEXT": TokenType.LONGTEXT,
            "MEDIUMBLOB": TokenType.MEDIUMBLOB,
            "LONGBLOB": TokenType.LONGBLOB,
            "START": TokenType.BEGIN,
            "SEPARATOR": TokenType.SEPARATOR,
            "_ARMSCII8": TokenType.INTRODUCER,
            "_ASCII": TokenType.INTRODUCER,
            "_BIG5": TokenType.INTRODUCER,
            "_BINARY": TokenType.INTRODUCER,
            "_CP1250": TokenType.INTRODUCER,
            "_CP1251": TokenType.INTRODUCER,
            "_CP1256": TokenType.INTRODUCER,
            "_CP1257": TokenType.INTRODUCER,
            "_CP850": TokenType.INTRODUCER,
            "_CP852": TokenType.INTRODUCER,
            "_CP866": TokenType.INTRODUCER,
            "_CP932": TokenType.INTRODUCER,
            "_DEC8": TokenType.INTRODUCER,
            "_EUCJPMS": TokenType.INTRODUCER,
            "_EUCKR": TokenType.INTRODUCER,
            "_GB18030": TokenType.INTRODUCER,
            "_GB2312": TokenType.INTRODUCER,
            "_GBK": TokenType.INTRODUCER,
            "_GEOSTD8": TokenType.INTRODUCER,
            "_GREEK": TokenType.INTRODUCER,
            "_HEBREW": TokenType.INTRODUCER,
            "_HP8": TokenType.INTRODUCER,
            "_KEYBCS2": TokenType.INTRODUCER,
            "_KOI8R": TokenType.INTRODUCER,
            "_KOI8U": TokenType.INTRODUCER,
            "_LATIN1": TokenType.INTRODUCER,
            "_LATIN2": TokenType.INTRODUCER,
            "_LATIN5": TokenType.INTRODUCER,
            "_LATIN7": TokenType.INTRODUCER,
            "_MACCE": TokenType.INTRODUCER,
            "_MACROMAN": TokenType.INTRODUCER,
            "_SJIS": TokenType.INTRODUCER,
            "_SWE7": TokenType.INTRODUCER,
            "_TIS620": TokenType.INTRODUCER,
            "_UCS2": TokenType.INTRODUCER,
            "_UJIS": TokenType.INTRODUCER,
            # https://dev.mysql.com/doc/refman/8.0/en/string-literals.html
            "_UTF8": TokenType.INTRODUCER,
            "_UTF16": TokenType.INTRODUCER,
            "_UTF16LE": TokenType.INTRODUCER,
            "_UTF32": TokenType.INTRODUCER,
            "_UTF8MB3": TokenType.INTRODUCER,
            "_UTF8MB4": TokenType.INTRODUCER,
            "@@": TokenType.SESSION_PARAMETER,
        }

        # SET and SHOW are parsed properly here instead of as opaque commands.
        COMMANDS = tokens.Tokenizer.COMMANDS - {TokenType.SET, TokenType.SHOW}

    class Parser(parser.Parser):
        FUNC_TOKENS = {*parser.Parser.FUNC_TOKENS, TokenType.SCHEMA}  # type: ignore

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "DATE_ADD": _date_add(exp.DateAdd),
            "DATE_SUB": _date_add(exp.DateSub),
            "STR_TO_DATE": _str_to_date,
            "LOCATE": locate_to_strposition,
            "INSTR": lambda args: exp.StrPosition(substr=seq_get(args, 1), this=seq_get(args, 0)),
            "LEFT": lambda args: exp.Substring(
                this=seq_get(args, 0), start=exp.Literal.number(1), length=seq_get(args, 1)
            ),
        }

        FUNCTION_PARSERS = {
            **parser.Parser.FUNCTION_PARSERS,  # type: ignore
            "GROUP_CONCAT": lambda self: self.expression(
                exp.GroupConcat,
                this=self._parse_lambda(),
                separator=self._match(TokenType.SEPARATOR) and self._parse_field(),
            ),
        }

        PROPERTY_PARSERS = {
            **parser.Parser.PROPERTY_PARSERS,  # type: ignore
            "ENGINE": lambda self: self._parse_property_assignment(exp.EngineProperty),
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,  # type: ignore
            TokenType.SHOW: lambda self: self._parse_show(),
            TokenType.SET: lambda self: self._parse_set(),
        }

        SHOW_PARSERS = {
            "BINARY LOGS": _show_parser("BINARY LOGS"),
            "MASTER LOGS": _show_parser("BINARY LOGS"),
            "BINLOG EVENTS": _show_parser("BINLOG EVENTS"),
            "CHARACTER SET": _show_parser("CHARACTER SET"),
            "CHARSET": _show_parser("CHARACTER SET"),
            "COLLATION": _show_parser("COLLATION"),
            "FULL COLUMNS": _show_parser("COLUMNS", target="FROM", full=True),
            "COLUMNS": _show_parser("COLUMNS", target="FROM"),
            "CREATE DATABASE": _show_parser("CREATE DATABASE", target=True),
            "CREATE EVENT": _show_parser("CREATE EVENT", target=True),
            "CREATE FUNCTION": _show_parser("CREATE FUNCTION", target=True),
            "CREATE PROCEDURE": _show_parser("CREATE PROCEDURE", target=True),
            "CREATE TABLE": _show_parser("CREATE TABLE", target=True),
            "CREATE TRIGGER": _show_parser("CREATE TRIGGER", target=True),
            "CREATE VIEW": _show_parser("CREATE VIEW", target=True),
            "DATABASES": _show_parser("DATABASES"),
            "ENGINE": _show_parser("ENGINE", target=True),
            "STORAGE ENGINES": _show_parser("ENGINES"),
            "ENGINES": _show_parser("ENGINES"),
            "ERRORS": _show_parser("ERRORS"),
            "EVENTS": _show_parser("EVENTS"),
            "FUNCTION CODE": _show_parser("FUNCTION CODE", target=True),
            "FUNCTION STATUS": _show_parser("FUNCTION STATUS"),
            "GRANTS": _show_parser("GRANTS", target="FOR"),
            "INDEX": _show_parser("INDEX", target="FROM"),
            "MASTER STATUS": _show_parser("MASTER STATUS"),
            "OPEN TABLES": _show_parser("OPEN TABLES"),
            "PLUGINS": _show_parser("PLUGINS"),
            "PROCEDURE CODE": _show_parser("PROCEDURE CODE", target=True),
            "PROCEDURE STATUS": _show_parser("PROCEDURE STATUS"),
            "PRIVILEGES": _show_parser("PRIVILEGES"),
            "FULL PROCESSLIST": _show_parser("PROCESSLIST", full=True),
            "PROCESSLIST": _show_parser("PROCESSLIST"),
            "PROFILE": _show_parser("PROFILE"),
            "PROFILES": _show_parser("PROFILES"),
            "RELAYLOG EVENTS": _show_parser("RELAYLOG EVENTS"),
            "REPLICAS": _show_parser("REPLICAS"),
            "SLAVE HOSTS": _show_parser("REPLICAS"),
            "REPLICA STATUS": _show_parser("REPLICA STATUS"),
            "SLAVE STATUS": _show_parser("REPLICA STATUS"),
            "GLOBAL STATUS": _show_parser("STATUS", global_=True),
            "SESSION STATUS": _show_parser("STATUS"),
            "STATUS": _show_parser("STATUS"),
            "TABLE STATUS": _show_parser("TABLE STATUS"),
            "FULL TABLES": _show_parser("TABLES", full=True),
            "TABLES": _show_parser("TABLES"),
            "TRIGGERS": _show_parser("TRIGGERS"),
            "GLOBAL VARIABLES": _show_parser("VARIABLES", global_=True),
            "SESSION VARIABLES": _show_parser("VARIABLES"),
            "VARIABLES": _show_parser("VARIABLES"),
            "WARNINGS": _show_parser("WARNINGS"),
        }

        SET_PARSERS = {
            "GLOBAL": lambda self: self._parse_set_item_assignment("GLOBAL"),
            "PERSIST": lambda self: self._parse_set_item_assignment("PERSIST"),
            "PERSIST_ONLY": lambda self: self._parse_set_item_assignment("PERSIST_ONLY"),
            "SESSION": lambda self: self._parse_set_item_assignment("SESSION"),
            "LOCAL": lambda self: self._parse_set_item_assignment("LOCAL"),
            "CHARACTER SET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
            "CHARSET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
            "NAMES": lambda self: self._parse_set_item_names(),
            "TRANSACTION": lambda self: self._parse_set_transaction(),
        }

        PROFILE_TYPES = {
            "ALL",
            "BLOCK IO",
            "CONTEXT SWITCHES",
            "CPU",
            "IPC",
            "MEMORY",
            "PAGE FAULTS",
            "SOURCE",
            "SWAPS",
        }

        TRANSACTION_CHARACTERISTICS = {
            "ISOLATION LEVEL REPEATABLE READ",
            "ISOLATION LEVEL READ COMMITTED",
            "ISOLATION LEVEL READ UNCOMMITTED",
            "ISOLATION LEVEL SERIALIZABLE",
            "READ WRITE",
            "READ ONLY",
        }

        def _parse_show_mysql(self, this, target=False, full=None, global_=None):
            """Parse the tail of a SHOW statement of kind ``this``.

            ``target`` is either a keyword to consume before the target
            identifier (e.g. "FROM"), True when the target follows directly,
            or False when the statement takes no target.
            """
            if target:
                if isinstance(target, str):
                    self._match_text_seq(target)
                target_id = self._parse_id_var()
            else:
                target_id = None

            log = self._parse_string() if self._match_text_seq("IN") else None

            if this in {"BINLOG EVENTS", "RELAYLOG EVENTS"}:
                position = self._parse_number() if self._match_text_seq("FROM") else None
                db = None
            else:
                position = None
                db = self._parse_id_var() if self._match_text_seq("FROM") else None

            channel = self._parse_id_var() if self._match_text_seq("FOR", "CHANNEL") else None

            like = self._parse_string() if self._match_text_seq("LIKE") else None
            where = self._parse_where()

            if this == "PROFILE":
                types = self._parse_csv(lambda: self._parse_var_from_options(self.PROFILE_TYPES))
                query = self._parse_number() if self._match_text_seq("FOR", "QUERY") else None
                offset = self._parse_number() if self._match_text_seq("OFFSET") else None
                limit = self._parse_number() if self._match_text_seq("LIMIT") else None
            else:
                types, query = None, None
                offset, limit = self._parse_oldstyle_limit()

            mutex = True if self._match_text_seq("MUTEX") else None
            mutex = False if self._match_text_seq("STATUS") else mutex

            return self.expression(
                exp.Show,
                this=this,
                target=target_id,
                full=full,
                log=log,
                position=position,
                db=db,
                channel=channel,
                like=like,
                where=where,
                types=types,
                query=query,
                offset=offset,
                limit=limit,
                mutex=mutex,
                **{"global": global_},
            )

        def _parse_var_from_options(self, options):
            """Match one of the multi-word ``options`` and wrap it in exp.Var."""
            for option in options:
                if self._match_text_seq(*option.split(" ")):
                    return exp.Var(this=option)
            return None

        def _parse_oldstyle_limit(self):
            """Parse ``LIMIT n`` or ``LIMIT offset, n`` and return (offset, limit)."""
            limit = None
            offset = None
            if self._match_text_seq("LIMIT"):
                parts = self._parse_csv(self._parse_number)
                if len(parts) == 1:
                    limit = parts[0]
                elif len(parts) == 2:
                    limit = parts[1]
                    offset = parts[0]
            return offset, limit

        def _default_parse_set_item(self):
            return self._parse_set_item_assignment(kind=None)

        def _parse_set_item_assignment(self, kind):
            """Parse ``SET [kind] name = value`` into a SetItem expression."""
            if kind in {"GLOBAL", "SESSION"} and self._match_text_seq("TRANSACTION"):
                return self._parse_set_transaction(global_=kind == "GLOBAL")

            left = self._parse_primary() or self._parse_id_var()
            if not self._match(TokenType.EQ):
                self.raise_error("Expected =")
            right = self._parse_statement() or self._parse_id_var()

            this = self.expression(
                exp.EQ,
                this=left,
                expression=right,
            )

            return self.expression(
                exp.SetItem,
                this=this,
                kind=kind,
            )

        def _parse_set_item_charset(self, kind):
            """Parse ``SET CHARACTER SET <charset>``."""
            this = self._parse_string() or self._parse_id_var()

            return self.expression(
                exp.SetItem,
                this=this,
                kind=kind,
            )

        def _parse_set_item_names(self):
            """Parse ``SET NAMES <charset> [COLLATE <collation>]``."""
            charset = self._parse_string() or self._parse_id_var()
            if self._match_text_seq("COLLATE"):
                collate = self._parse_string() or self._parse_id_var()
            else:
                collate = None
            return self.expression(
                exp.SetItem,
                this=charset,
                collate=collate,
                kind="NAMES",
            )

        def _parse_set_transaction(self, global_=False):
            """Parse ``SET [GLOBAL] TRANSACTION <characteristics>``."""
            self._match_text_seq("TRANSACTION")
            characteristics = self._parse_csv(
                lambda: self._parse_var_from_options(self.TRANSACTION_CHARACTERISTICS)
            )
            return self.expression(
                exp.SetItem,
                expressions=characteristics,
                kind="TRANSACTION",
                **{"global": global_},
            )

    class Generator(generator.Generator):
        LOCKING_READS_SUPPORTED = True
        NULL_ORDERING_SUPPORTED = False

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            exp.CurrentDate: no_paren_current_date_sql,
            exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
            exp.ILike: no_ilike_sql,
            exp.TableSample: no_tablesample_sql,
            exp.TryCast: no_trycast_sql,
            exp.DateAdd: _date_add_sql("ADD"),
            exp.DateSub: _date_add_sql("SUB"),
            exp.DateTrunc: _date_trunc_sql,
            exp.GroupConcat: lambda self, e: f"""GROUP_CONCAT({self.sql(e, "this")} SEPARATOR {self.sql(e, "separator") or "','"})""",
            exp.StrToDate: _str_to_date_sql,
            exp.StrToTime: _str_to_date_sql,
            exp.Trim: _trim_sql,
            exp.NullSafeEQ: lambda self, e: self.binary(e, "<=>"),
            exp.NullSafeNEQ: lambda self, e: self.not_sql(self.binary(e, "<=>")),
            exp.StrPosition: strposition_to_locate_sql,
        }

        # MySQL supports these types natively; don't map them away.
        TYPE_MAPPING = generator.Generator.TYPE_MAPPING.copy()
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMBLOB)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGBLOB)

        def show_sql(self, expression):
            """Render an exp.Show node back into a MySQL SHOW statement."""
            this = f" {expression.name}"
            full = " FULL" if expression.args.get("full") else ""
            global_ = " GLOBAL" if expression.args.get("global") else ""

            target = self.sql(expression, "target")
            target = f" {target}" if target else ""
            if expression.name in {"COLUMNS", "INDEX"}:
                target = f" FROM{target}"
            elif expression.name == "GRANTS":
                target = f" FOR{target}"

            db = self._prefixed_sql("FROM", expression, "db")

            like = self._prefixed_sql("LIKE", expression, "like")
            where = self.sql(expression, "where")

            types = self.expressions(expression, key="types")
            types = f" {types}" if types else types
            query = self._prefixed_sql("FOR QUERY", expression, "query")

            if expression.name == "PROFILE":
                offset = self._prefixed_sql("OFFSET", expression, "offset")
                limit = self._prefixed_sql("LIMIT", expression, "limit")
            else:
                offset = ""
                limit = self._oldstyle_limit_sql(expression)

            log = self._prefixed_sql("IN", expression, "log")
            position = self._prefixed_sql("FROM", expression, "position")

            channel = self._prefixed_sql("FOR CHANNEL", expression, "channel")

            if expression.name == "ENGINE":
                mutex_or_status = " MUTEX" if expression.args.get("mutex") else " STATUS"
            else:
                mutex_or_status = ""

            return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"

        def _prefixed_sql(self, prefix, expression, arg):
            """Render ``arg`` preceded by ``prefix``, or "" when absent."""
            sql = self.sql(expression, arg)
            if not sql:
                return ""
            return f" {prefix} {sql}"

        def _oldstyle_limit_sql(self, expression):
            """Render ``LIMIT [offset,] limit`` in the legacy comma form."""
            limit = self.sql(expression, "limit")
            offset = self.sql(expression, "offset")
            if limit:
                limit_offset = f"{offset}, {limit}" if offset else limit
                return f" LIMIT {limit_offset}"
            return ""

        def setitem_sql(self, expression):
            kind = self.sql(expression, "kind")
            kind = f"{kind} " if kind else ""
            this = self.sql(expression, "this")
            expressions = self.expressions(expression)
            collate = self.sql(expression, "collate")
            collate = f" COLLATE {collate}" if collate else ""
            global_ = "GLOBAL " if expression.args.get("global") else ""
            return f"{global_}{kind}{this}{expressions}{collate}"

        def set_sql(self, expression):
            return f"SET {self.expressions(expression)}"
class MySQL(Dialect):
    # Default time format used when transpiling TIME_TO_STR and friends.
    time_format = "'%Y-%m-%d %T'"

    # Mapping of MySQL strftime-style specifiers to Python's, per
    # https://prestodb.io/docs/current/functions/datetime.html#mysql-date-functions
    time_mapping = {
        "%M": "%B",
        "%c": "%-m",
        "%e": "%-d",
        "%h": "%I",
        "%i": "%M",
        "%s": "%S",
        "%S": "%S",
        "%u": "%W",
        "%k": "%-H",
        "%l": "%-I",
        "%T": "%H:%M:%S",
    }

    class Tokenizer(tokens.Tokenizer):
        QUOTES = ["'", '"']
        COMMENTS = ["--", "#", ("/*", "*/")]
        IDENTIFIERS = ["`"]
        STRING_ESCAPES = ["'", "\\"]
        BIT_STRINGS = [("b'", "'"), ("B'", "'"), ("0b", "")]
        HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", "")]

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "MEDIUMTEXT": TokenType.MEDIUMTEXT,
            "LONGTEXT": TokenType.LONGTEXT,
            "MEDIUMBLOB": TokenType.MEDIUMBLOB,
            "LONGBLOB": TokenType.LONGBLOB,
            "START": TokenType.BEGIN,
            "SEPARATOR": TokenType.SEPARATOR,
            "_ARMSCII8": TokenType.INTRODUCER,
            "_ASCII": TokenType.INTRODUCER,
            "_BIG5": TokenType.INTRODUCER,
            "_BINARY": TokenType.INTRODUCER,
            "_CP1250": TokenType.INTRODUCER,
            "_CP1251": TokenType.INTRODUCER,
            "_CP1256": TokenType.INTRODUCER,
            "_CP1257": TokenType.INTRODUCER,
            "_CP850": TokenType.INTRODUCER,
            "_CP852": TokenType.INTRODUCER,
            "_CP866": TokenType.INTRODUCER,
            "_CP932": TokenType.INTRODUCER,
            "_DEC8": TokenType.INTRODUCER,
            "_EUCJPMS": TokenType.INTRODUCER,
            "_EUCKR": TokenType.INTRODUCER,
            "_GB18030": TokenType.INTRODUCER,
            "_GB2312": TokenType.INTRODUCER,
            "_GBK": TokenType.INTRODUCER,
            "_GEOSTD8": TokenType.INTRODUCER,
            "_GREEK": TokenType.INTRODUCER,
            "_HEBREW": TokenType.INTRODUCER,
            "_HP8": TokenType.INTRODUCER,
            "_KEYBCS2": TokenType.INTRODUCER,
            "_KOI8R": TokenType.INTRODUCER,
            "_KOI8U": TokenType.INTRODUCER,
            "_LATIN1": TokenType.INTRODUCER,
            "_LATIN2": TokenType.INTRODUCER,
            "_LATIN5": TokenType.INTRODUCER,
            "_LATIN7": TokenType.INTRODUCER,
            "_MACCE": TokenType.INTRODUCER,
            "_MACROMAN": TokenType.INTRODUCER,
            "_SJIS": TokenType.INTRODUCER,
            "_SWE7": TokenType.INTRODUCER,
            "_TIS620": TokenType.INTRODUCER,
            "_UCS2": TokenType.INTRODUCER,
            "_UJIS": TokenType.INTRODUCER,
            # https://dev.mysql.com/doc/refman/8.0/en/string-literals.html
            "_UTF8": TokenType.INTRODUCER,
            "_UTF16": TokenType.INTRODUCER,
            "_UTF16LE": TokenType.INTRODUCER,
            "_UTF32": TokenType.INTRODUCER,
            "_UTF8MB3": TokenType.INTRODUCER,
            "_UTF8MB4": TokenType.INTRODUCER,
            "@@": TokenType.SESSION_PARAMETER,
        }

        # SET/SHOW are removed from the opaque-command set so the parser
        # below can handle them with full statement grammars.
        COMMANDS = tokens.Tokenizer.COMMANDS - {TokenType.SET, TokenType.SHOW}

    class Parser(parser.Parser):
        FUNC_TOKENS = {*parser.Parser.FUNC_TOKENS, TokenType.SCHEMA}  # type: ignore

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "DATE_ADD": _date_add(exp.DateAdd),
            "DATE_SUB": _date_add(exp.DateSub),
            "STR_TO_DATE": _str_to_date,
            "LOCATE": locate_to_strposition,
            "INSTR": lambda args: exp.StrPosition(substr=seq_get(args, 1), this=seq_get(args, 0)),
            "LEFT": lambda args: exp.Substring(
                this=seq_get(args, 0), start=exp.Literal.number(1), length=seq_get(args, 1)
            ),
        }

        FUNCTION_PARSERS = {
            **parser.Parser.FUNCTION_PARSERS,  # type: ignore
            "GROUP_CONCAT": lambda self: self.expression(
                exp.GroupConcat,
                this=self._parse_lambda(),
                separator=self._match(TokenType.SEPARATOR) and self._parse_field(),
            ),
        }

        PROPERTY_PARSERS = {
            **parser.Parser.PROPERTY_PARSERS,  # type: ignore
            "ENGINE": lambda self: self._parse_property_assignment(exp.EngineProperty),
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,  # type: ignore
            TokenType.SHOW: lambda self: self._parse_show(),
            TokenType.SET: lambda self: self._parse_set(),
        }

        SHOW_PARSERS = {
            "BINARY LOGS": _show_parser("BINARY LOGS"),
            "MASTER LOGS": _show_parser("BINARY LOGS"),
            "BINLOG EVENTS": _show_parser("BINLOG EVENTS"),
            "CHARACTER SET": _show_parser("CHARACTER SET"),
            "CHARSET": _show_parser("CHARACTER SET"),
            "COLLATION": _show_parser("COLLATION"),
            "FULL COLUMNS": _show_parser("COLUMNS", target="FROM", full=True),
            "COLUMNS": _show_parser("COLUMNS", target="FROM"),
            "CREATE DATABASE": _show_parser("CREATE DATABASE", target=True),
            "CREATE EVENT": _show_parser("CREATE EVENT", target=True),
            "CREATE FUNCTION": _show_parser("CREATE FUNCTION", target=True),
            "CREATE PROCEDURE": _show_parser("CREATE PROCEDURE", target=True),
            "CREATE TABLE": _show_parser("CREATE TABLE", target=True),
            "CREATE TRIGGER": _show_parser("CREATE TRIGGER", target=True),
            "CREATE VIEW": _show_parser("CREATE VIEW", target=True),
            "DATABASES": _show_parser("DATABASES"),
            "ENGINE": _show_parser("ENGINE", target=True),
            "STORAGE ENGINES": _show_parser("ENGINES"),
            "ENGINES": _show_parser("ENGINES"),
            "ERRORS": _show_parser("ERRORS"),
            "EVENTS": _show_parser("EVENTS"),
            "FUNCTION CODE": _show_parser("FUNCTION CODE", target=True),
            "FUNCTION STATUS": _show_parser("FUNCTION STATUS"),
            "GRANTS": _show_parser("GRANTS", target="FOR"),
            "INDEX": _show_parser("INDEX", target="FROM"),
            "MASTER STATUS": _show_parser("MASTER STATUS"),
            "OPEN TABLES": _show_parser("OPEN TABLES"),
            "PLUGINS": _show_parser("PLUGINS"),
            "PROCEDURE CODE": _show_parser("PROCEDURE CODE", target=True),
            "PROCEDURE STATUS": _show_parser("PROCEDURE STATUS"),
            "PRIVILEGES": _show_parser("PRIVILEGES"),
            "FULL PROCESSLIST": _show_parser("PROCESSLIST", full=True),
            "PROCESSLIST": _show_parser("PROCESSLIST"),
            "PROFILE": _show_parser("PROFILE"),
            "PROFILES": _show_parser("PROFILES"),
            "RELAYLOG EVENTS": _show_parser("RELAYLOG EVENTS"),
            "REPLICAS": _show_parser("REPLICAS"),
            "SLAVE HOSTS": _show_parser("REPLICAS"),
            "REPLICA STATUS": _show_parser("REPLICA STATUS"),
            "SLAVE STATUS": _show_parser("REPLICA STATUS"),
            "GLOBAL STATUS": _show_parser("STATUS", global_=True),
            "SESSION STATUS": _show_parser("STATUS"),
            "STATUS": _show_parser("STATUS"),
            "TABLE STATUS": _show_parser("TABLE STATUS"),
            "FULL TABLES": _show_parser("TABLES", full=True),
            "TABLES": _show_parser("TABLES"),
            "TRIGGERS": _show_parser("TRIGGERS"),
            "GLOBAL VARIABLES": _show_parser("VARIABLES", global_=True),
            "SESSION VARIABLES": _show_parser("VARIABLES"),
            "VARIABLES": _show_parser("VARIABLES"),
            "WARNINGS": _show_parser("WARNINGS"),
        }

        SET_PARSERS = {
            "GLOBAL": lambda self: self._parse_set_item_assignment("GLOBAL"),
            "PERSIST": lambda self: self._parse_set_item_assignment("PERSIST"),
            "PERSIST_ONLY": lambda self: self._parse_set_item_assignment("PERSIST_ONLY"),
            "SESSION": lambda self: self._parse_set_item_assignment("SESSION"),
            "LOCAL": lambda self: self._parse_set_item_assignment("LOCAL"),
            "CHARACTER SET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
            "CHARSET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
            "NAMES": lambda self: self._parse_set_item_names(),
            "TRANSACTION": lambda self: self._parse_set_transaction(),
        }

        PROFILE_TYPES = {
            "ALL",
            "BLOCK IO",
            "CONTEXT SWITCHES",
            "CPU",
            "IPC",
            "MEMORY",
            "PAGE FAULTS",
            "SOURCE",
            "SWAPS",
        }

        TRANSACTION_CHARACTERISTICS = {
            "ISOLATION LEVEL REPEATABLE READ",
            "ISOLATION LEVEL READ COMMITTED",
            "ISOLATION LEVEL READ UNCOMMITTED",
            "ISOLATION LEVEL SERIALIZABLE",
            "READ WRITE",
            "READ ONLY",
        }

        def _parse_show_mysql(self, this, target=False, full=None, global_=None):
            """Parse the remainder of a SHOW statement of kind ``this``."""
            if target:
                # A string target is a keyword (e.g. FROM) preceding the id.
                if isinstance(target, str):
                    self._match_text_seq(target)
                target_id = self._parse_id_var()
            else:
                target_id = None

            log = self._parse_string() if self._match_text_seq("IN") else None

            if this in {"BINLOG EVENTS", "RELAYLOG EVENTS"}:
                position = self._parse_number() if self._match_text_seq("FROM") else None
                db = None
            else:
                position = None
                db = self._parse_id_var() if self._match_text_seq("FROM") else None

            channel = self._parse_id_var() if self._match_text_seq("FOR", "CHANNEL") else None

            like = self._parse_string() if self._match_text_seq("LIKE") else None
            where = self._parse_where()

            if this == "PROFILE":
                types = self._parse_csv(lambda: self._parse_var_from_options(self.PROFILE_TYPES))
                query = self._parse_number() if self._match_text_seq("FOR", "QUERY") else None
                offset = self._parse_number() if self._match_text_seq("OFFSET") else None
                limit = self._parse_number() if self._match_text_seq("LIMIT") else None
            else:
                types, query = None, None
                offset, limit = self._parse_oldstyle_limit()

            mutex = True if self._match_text_seq("MUTEX") else None
            mutex = False if self._match_text_seq("STATUS") else mutex

            return self.expression(
                exp.Show,
                this=this,
                target=target_id,
                full=full,
                log=log,
                position=position,
                db=db,
                channel=channel,
                like=like,
                where=where,
                types=types,
                query=query,
                offset=offset,
                limit=limit,
                mutex=mutex,
                **{"global": global_},
            )

        def _parse_var_from_options(self, options):
            """Return an exp.Var for the first multi-word option matched."""
            for option in options:
                if self._match_text_seq(*option.split(" ")):
                    return exp.Var(this=option)
            return None

        def _parse_oldstyle_limit(self):
            """Parse legacy ``LIMIT [offset,] limit``; returns (offset, limit)."""
            limit = None
            offset = None
            if self._match_text_seq("LIMIT"):
                parts = self._parse_csv(self._parse_number)
                if len(parts) == 1:
                    limit = parts[0]
                elif len(parts) == 2:
                    limit = parts[1]
                    offset = parts[0]
            return offset, limit

        def _default_parse_set_item(self):
            return self._parse_set_item_assignment(kind=None)

        def _parse_set_item_assignment(self, kind):
            """Parse one ``[kind] name = value`` item of a SET statement."""
            if kind in {"GLOBAL", "SESSION"} and self._match_text_seq("TRANSACTION"):
                return self._parse_set_transaction(global_=kind == "GLOBAL")

            left = self._parse_primary() or self._parse_id_var()
            if not self._match(TokenType.EQ):
                self.raise_error("Expected =")
            right = self._parse_statement() or self._parse_id_var()

            this = self.expression(
                exp.EQ,
                this=left,
                expression=right,
            )

            return self.expression(
                exp.SetItem,
                this=this,
                kind=kind,
            )

        def _parse_set_item_charset(self, kind):
            """Parse the charset argument of ``SET CHARACTER SET``."""
            this = self._parse_string() or self._parse_id_var()

            return self.expression(
                exp.SetItem,
                this=this,
                kind=kind,
            )

        def _parse_set_item_names(self):
            """Parse ``SET NAMES <charset> [COLLATE <collation>]``."""
            charset = self._parse_string() or self._parse_id_var()
            if self._match_text_seq("COLLATE"):
                collate = self._parse_string() or self._parse_id_var()
            else:
                collate = None
            return self.expression(
                exp.SetItem,
                this=charset,
                collate=collate,
                kind="NAMES",
            )

        def _parse_set_transaction(self, global_=False):
            """Parse ``SET [GLOBAL] TRANSACTION <characteristics...>``."""
            self._match_text_seq("TRANSACTION")
            characteristics = self._parse_csv(
                lambda: self._parse_var_from_options(self.TRANSACTION_CHARACTERISTICS)
            )
            return self.expression(
                exp.SetItem,
                expressions=characteristics,
                kind="TRANSACTION",
                **{"global": global_},
            )

    class Generator(generator.Generator):
        LOCKING_READS_SUPPORTED = True
        NULL_ORDERING_SUPPORTED = False

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            exp.CurrentDate: no_paren_current_date_sql,
            exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
            exp.ILike: no_ilike_sql,
            exp.TableSample: no_tablesample_sql,
            exp.TryCast: no_trycast_sql,
            exp.DateAdd: _date_add_sql("ADD"),
            exp.DateSub: _date_add_sql("SUB"),
            exp.DateTrunc: _date_trunc_sql,
            exp.GroupConcat: lambda self, e: f"""GROUP_CONCAT({self.sql(e, "this")} SEPARATOR {self.sql(e, "separator") or "','"})""",
            exp.StrToDate: _str_to_date_sql,
            exp.StrToTime: _str_to_date_sql,
            exp.Trim: _trim_sql,
            exp.NullSafeEQ: lambda self, e: self.binary(e, "<=>"),
            exp.NullSafeNEQ: lambda self, e: self.not_sql(self.binary(e, "<=>")),
            exp.StrPosition: strposition_to_locate_sql,
        }

        # These text/blob types exist natively in MySQL, so drop the
        # default fallbacks inherited from the base generator.
        TYPE_MAPPING = generator.Generator.TYPE_MAPPING.copy()
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMBLOB)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGBLOB)

        def show_sql(self, expression):
            """Generate SQL for an exp.Show node."""
            this = f" {expression.name}"
            full = " FULL" if expression.args.get("full") else ""
            global_ = " GLOBAL" if expression.args.get("global") else ""

            target = self.sql(expression, "target")
            target = f" {target}" if target else ""
            if expression.name in {"COLUMNS", "INDEX"}:
                target = f" FROM{target}"
            elif expression.name == "GRANTS":
                target = f" FOR{target}"

            db = self._prefixed_sql("FROM", expression, "db")

            like = self._prefixed_sql("LIKE", expression, "like")
            where = self.sql(expression, "where")

            types = self.expressions(expression, key="types")
            types = f" {types}" if types else types
            query = self._prefixed_sql("FOR QUERY", expression, "query")

            if expression.name == "PROFILE":
                offset = self._prefixed_sql("OFFSET", expression, "offset")
                limit = self._prefixed_sql("LIMIT", expression, "limit")
            else:
                offset = ""
                limit = self._oldstyle_limit_sql(expression)

            log = self._prefixed_sql("IN", expression, "log")
            position = self._prefixed_sql("FROM", expression, "position")

            channel = self._prefixed_sql("FOR CHANNEL", expression, "channel")

            if expression.name == "ENGINE":
                mutex_or_status = " MUTEX" if expression.args.get("mutex") else " STATUS"
            else:
                mutex_or_status = ""

            return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"

        def _prefixed_sql(self, prefix, expression, arg):
            """Render ``arg`` with a leading keyword, or "" when empty."""
            sql = self.sql(expression, arg)
            if not sql:
                return ""
            return f" {prefix} {sql}"

        def _oldstyle_limit_sql(self, expression):
            """Render the comma-form ``LIMIT offset, limit`` clause."""
            limit = self.sql(expression, "limit")
            offset = self.sql(expression, "offset")
            if limit:
                limit_offset = f"{offset}, {limit}" if offset else limit
                return f" LIMIT {limit_offset}"
            return ""

        def setitem_sql(self, expression):
            kind = self.sql(expression, "kind")
            kind = f"{kind} " if kind else ""
            this = self.sql(expression, "this")
            expressions = self.expressions(expression)
            collate = self.sql(expression, "collate")
            collate = f" COLLATE {collate}" if collate else ""
            global_ = "GLOBAL " if expression.args.get("global") else ""
            return f"{global_}{kind}{this}{expressions}{collate}"

        def set_sql(self, expression):
            return f"SET {self.expressions(expression)}"
Inherited Members
class Tokenizer(tokens.Tokenizer):
    """MySQL-specific tokenizer configuration.

    Registers MySQL's quoting/comment/identifier syntax, its extra text and
    blob type keywords, and the character-set introducer prefixes.
    """

    QUOTES = ["'", '"']
    COMMENTS = ["--", "#", ("/*", "*/")]
    IDENTIFIERS = ["`"]
    STRING_ESCAPES = ["'", "\\"]
    BIT_STRINGS = [("b'", "'"), ("B'", "'"), ("0b", "")]
    HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", "")]

    # Character-set introducer prefixes for string literals, e.g. _utf8'abc'.
    # https://dev.mysql.com/doc/refman/8.0/en/string-literals.html
    _CHARSET_INTRODUCERS = (
        "_ARMSCII8",
        "_ASCII",
        "_BIG5",
        "_BINARY",
        "_CP1250",
        "_CP1251",
        "_CP1256",
        "_CP1257",
        "_CP850",
        "_CP852",
        "_CP866",
        "_CP932",
        "_DEC8",
        "_EUCJPMS",
        "_EUCKR",
        "_GB18030",
        "_GB2312",
        "_GBK",
        "_GEOSTD8",
        "_GREEK",
        "_HEBREW",
        "_HP8",
        "_KEYBCS2",
        "_KOI8R",
        "_KOI8U",
        "_LATIN1",
        "_LATIN2",
        "_LATIN5",
        "_LATIN7",
        "_MACCE",
        "_MACROMAN",
        "_SJIS",
        "_SWE7",
        "_TIS620",
        "_UCS2",
        "_UJIS",
        "_UTF8",
        "_UTF16",
        "_UTF16LE",
        "_UTF32",
        "_UTF8MB3",
        "_UTF8MB4",
    )

    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
        "MEDIUMTEXT": TokenType.MEDIUMTEXT,
        "LONGTEXT": TokenType.LONGTEXT,
        "MEDIUMBLOB": TokenType.MEDIUMBLOB,
        "LONGBLOB": TokenType.LONGBLOB,
        # START TRANSACTION tokenizes the same way as BEGIN.
        "START": TokenType.BEGIN,
        "SEPARATOR": TokenType.SEPARATOR,
        # NOTE: the comprehension's iterable is evaluated in class scope, so
        # referencing _CHARSET_INTRODUCERS here is valid.
        **{introducer: TokenType.INTRODUCER for introducer in _CHARSET_INTRODUCERS},
        "@@": TokenType.SESSION_PARAMETER,
    }

    # SET and SHOW are parsed as real statements by this dialect's Parser
    # instead of being treated as opaque commands.
    COMMANDS = tokens.Tokenizer.COMMANDS - {TokenType.SET, TokenType.SHOW}
Inherited Members
class Parser(parser.Parser):
    """Parser for the MySQL dialect.

    Extends the base parser with MySQL-specific function parsing and with
    full statement parsing for SHOW and SET (which the tokenizer no longer
    treats as opaque commands).
    """

    # Allow SCHEMA to start a function call (MySQL's SCHEMA() builtin).
    FUNC_TOKENS = {*parser.Parser.FUNC_TOKENS, TokenType.SCHEMA}  # type: ignore

    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,  # type: ignore
        "DATE_ADD": _date_add(exp.DateAdd),
        "DATE_SUB": _date_add(exp.DateSub),
        "STR_TO_DATE": _str_to_date,
        "LOCATE": locate_to_strposition,
        # INSTR(haystack, needle) maps to StrPosition with swapped arg order.
        "INSTR": lambda args: exp.StrPosition(substr=seq_get(args, 1), this=seq_get(args, 0)),
        # LEFT(s, n) is a substring starting at position 1 with length n.
        "LEFT": lambda args: exp.Substring(
            this=seq_get(args, 0), start=exp.Literal.number(1), length=seq_get(args, 1)
        ),
    }

    FUNCTION_PARSERS = {
        **parser.Parser.FUNCTION_PARSERS,  # type: ignore
        # GROUP_CONCAT(expr [SEPARATOR sep]) — the separator is optional.
        "GROUP_CONCAT": lambda self: self.expression(
            exp.GroupConcat,
            this=self._parse_lambda(),
            separator=self._match(TokenType.SEPARATOR) and self._parse_field(),
        ),
    }

    PROPERTY_PARSERS = {
        **parser.Parser.PROPERTY_PARSERS,  # type: ignore
        "ENGINE": lambda self: self._parse_property_assignment(exp.EngineProperty),
    }

    STATEMENT_PARSERS = {
        **parser.Parser.STATEMENT_PARSERS,  # type: ignore
        TokenType.SHOW: lambda self: self._parse_show(),
        TokenType.SET: lambda self: self._parse_set(),
    }

    # Maps each SHOW variant to a parser; aliases (MASTER LOGS, CHARSET,
    # SLAVE HOSTS, ...) are normalized to a single canonical name.
    SHOW_PARSERS = {
        "BINARY LOGS": _show_parser("BINARY LOGS"),
        "MASTER LOGS": _show_parser("BINARY LOGS"),
        "BINLOG EVENTS": _show_parser("BINLOG EVENTS"),
        "CHARACTER SET": _show_parser("CHARACTER SET"),
        "CHARSET": _show_parser("CHARACTER SET"),
        "COLLATION": _show_parser("COLLATION"),
        "FULL COLUMNS": _show_parser("COLUMNS", target="FROM", full=True),
        "COLUMNS": _show_parser("COLUMNS", target="FROM"),
        "CREATE DATABASE": _show_parser("CREATE DATABASE", target=True),
        "CREATE EVENT": _show_parser("CREATE EVENT", target=True),
        "CREATE FUNCTION": _show_parser("CREATE FUNCTION", target=True),
        "CREATE PROCEDURE": _show_parser("CREATE PROCEDURE", target=True),
        "CREATE TABLE": _show_parser("CREATE TABLE", target=True),
        "CREATE TRIGGER": _show_parser("CREATE TRIGGER", target=True),
        "CREATE VIEW": _show_parser("CREATE VIEW", target=True),
        "DATABASES": _show_parser("DATABASES"),
        "ENGINE": _show_parser("ENGINE", target=True),
        "STORAGE ENGINES": _show_parser("ENGINES"),
        "ENGINES": _show_parser("ENGINES"),
        "ERRORS": _show_parser("ERRORS"),
        "EVENTS": _show_parser("EVENTS"),
        "FUNCTION CODE": _show_parser("FUNCTION CODE", target=True),
        "FUNCTION STATUS": _show_parser("FUNCTION STATUS"),
        "GRANTS": _show_parser("GRANTS", target="FOR"),
        "INDEX": _show_parser("INDEX", target="FROM"),
        "MASTER STATUS": _show_parser("MASTER STATUS"),
        "OPEN TABLES": _show_parser("OPEN TABLES"),
        "PLUGINS": _show_parser("PLUGINS"),
        "PROCEDURE CODE": _show_parser("PROCEDURE CODE", target=True),
        "PROCEDURE STATUS": _show_parser("PROCEDURE STATUS"),
        "PRIVILEGES": _show_parser("PRIVILEGES"),
        "FULL PROCESSLIST": _show_parser("PROCESSLIST", full=True),
        "PROCESSLIST": _show_parser("PROCESSLIST"),
        "PROFILE": _show_parser("PROFILE"),
        "PROFILES": _show_parser("PROFILES"),
        "RELAYLOG EVENTS": _show_parser("RELAYLOG EVENTS"),
        "REPLICAS": _show_parser("REPLICAS"),
        "SLAVE HOSTS": _show_parser("REPLICAS"),
        "REPLICA STATUS": _show_parser("REPLICA STATUS"),
        "SLAVE STATUS": _show_parser("REPLICA STATUS"),
        "GLOBAL STATUS": _show_parser("STATUS", global_=True),
        "SESSION STATUS": _show_parser("STATUS"),
        "STATUS": _show_parser("STATUS"),
        "TABLE STATUS": _show_parser("TABLE STATUS"),
        "FULL TABLES": _show_parser("TABLES", full=True),
        "TABLES": _show_parser("TABLES"),
        "TRIGGERS": _show_parser("TRIGGERS"),
        "GLOBAL VARIABLES": _show_parser("VARIABLES", global_=True),
        "SESSION VARIABLES": _show_parser("VARIABLES"),
        "VARIABLES": _show_parser("VARIABLES"),
        "WARNINGS": _show_parser("WARNINGS"),
    }

    # Maps the first keyword of a SET item to its parser.
    SET_PARSERS = {
        "GLOBAL": lambda self: self._parse_set_item_assignment("GLOBAL"),
        "PERSIST": lambda self: self._parse_set_item_assignment("PERSIST"),
        "PERSIST_ONLY": lambda self: self._parse_set_item_assignment("PERSIST_ONLY"),
        "SESSION": lambda self: self._parse_set_item_assignment("SESSION"),
        "LOCAL": lambda self: self._parse_set_item_assignment("LOCAL"),
        "CHARACTER SET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
        "CHARSET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
        "NAMES": lambda self: self._parse_set_item_names(),
        "TRANSACTION": lambda self: self._parse_set_transaction(),
    }

    # Valid type keywords for SHOW PROFILE.
    PROFILE_TYPES = {
        "ALL",
        "BLOCK IO",
        "CONTEXT SWITCHES",
        "CPU",
        "IPC",
        "MEMORY",
        "PAGE FAULTS",
        "SOURCE",
        "SWAPS",
    }

    # Valid characteristics for SET TRANSACTION.
    TRANSACTION_CHARACTERISTICS = {
        "ISOLATION LEVEL REPEATABLE READ",
        "ISOLATION LEVEL READ COMMITTED",
        "ISOLATION LEVEL READ UNCOMMITTED",
        "ISOLATION LEVEL SERIALIZABLE",
        "READ WRITE",
        "READ ONLY",
    }

    def _parse_show_mysql(self, this, target=False, full=None, global_=None):
        """Parse the remainder of a SHOW statement into an exp.Show node.

        Args:
            this: canonical name of the SHOW variant (e.g. "COLUMNS").
            target: False for no target; True to parse a bare identifier
                target; a string ("FROM"/"FOR") to first consume that keyword.
            full: True when the FULL modifier was already consumed.
            global_: True when the GLOBAL modifier was already consumed.
        """
        if target:
            if isinstance(target, str):
                self._match_text_seq(target)
            target_id = self._parse_id_var()
        else:
            target_id = None

        log = self._parse_string() if self._match_text_seq("IN") else None

        # For binlog/relaylog events, FROM introduces a numeric position;
        # everywhere else it introduces a database name.
        if this in {"BINLOG EVENTS", "RELAYLOG EVENTS"}:
            position = self._parse_number() if self._match_text_seq("FROM") else None
            db = None
        else:
            position = None
            db = self._parse_id_var() if self._match_text_seq("FROM") else None

        channel = self._parse_id_var() if self._match_text_seq("FOR", "CHANNEL") else None

        like = self._parse_string() if self._match_text_seq("LIKE") else None
        where = self._parse_where()

        # SHOW PROFILE has its own type list and OFFSET/LIMIT clauses;
        # other variants use the old-style "LIMIT [offset,] count" form.
        if this == "PROFILE":
            types = self._parse_csv(lambda: self._parse_var_from_options(self.PROFILE_TYPES))
            query = self._parse_number() if self._match_text_seq("FOR", "QUERY") else None
            offset = self._parse_number() if self._match_text_seq("OFFSET") else None
            limit = self._parse_number() if self._match_text_seq("LIMIT") else None
        else:
            types, query = None, None
            offset, limit = self._parse_oldstyle_limit()

        # SHOW ENGINE ends in MUTEX or STATUS; None means neither appeared.
        mutex = True if self._match_text_seq("MUTEX") else None
        mutex = False if self._match_text_seq("STATUS") else mutex

        return self.expression(
            exp.Show,
            this=this,
            target=target_id,
            full=full,
            log=log,
            position=position,
            db=db,
            channel=channel,
            like=like,
            where=where,
            types=types,
            query=query,
            offset=offset,
            limit=limit,
            mutex=mutex,
            # "global" is a Python keyword, so it is passed via dict unpacking.
            **{"global": global_},
        )

    def _parse_var_from_options(self, options):
        """Consume one of the multi-word options and return it as a Var, or None."""
        for option in options:
            if self._match_text_seq(*option.split(" ")):
                return exp.Var(this=option)
        return None

    def _parse_oldstyle_limit(self):
        """Parse old-style "LIMIT count" or "LIMIT offset, count"; return (offset, limit)."""
        limit = None
        offset = None
        if self._match_text_seq("LIMIT"):
            parts = self._parse_csv(self._parse_number)
            if len(parts) == 1:
                limit = parts[0]
            elif len(parts) == 2:
                # Two numbers means the first is the offset.
                limit = parts[1]
                offset = parts[0]
        return offset, limit

    def _default_parse_set_item(self):
        """Fallback for SET items with no leading keyword: a plain assignment."""
        return self._parse_set_item_assignment(kind=None)

    def _parse_set_item_assignment(self, kind):
        """Parse "[kind] name = value" into SetItem(EQ(...), kind=kind)."""
        # GLOBAL/SESSION may prefix a TRANSACTION characteristics list instead.
        if kind in {"GLOBAL", "SESSION"} and self._match_text_seq("TRANSACTION"):
            return self._parse_set_transaction(global_=kind == "GLOBAL")

        left = self._parse_primary() or self._parse_id_var()
        if not self._match(TokenType.EQ):
            self.raise_error("Expected =")
        right = self._parse_statement() or self._parse_id_var()

        this = self.expression(
            exp.EQ,
            this=left,
            expression=right,
        )

        return self.expression(
            exp.SetItem,
            this=this,
            kind=kind,
        )

    def _parse_set_item_charset(self, kind):
        """Parse "SET CHARACTER SET <charset>" (charset may be quoted or bare)."""
        this = self._parse_string() or self._parse_id_var()

        return self.expression(
            exp.SetItem,
            this=this,
            kind=kind,
        )

    def _parse_set_item_names(self):
        """Parse "SET NAMES <charset> [COLLATE <collation>]"."""
        charset = self._parse_string() or self._parse_id_var()
        if self._match_text_seq("COLLATE"):
            collate = self._parse_string() or self._parse_id_var()
        else:
            collate = None
        return self.expression(
            exp.SetItem,
            this=charset,
            collate=collate,
            kind="NAMES",
        )

    def _parse_set_transaction(self, global_=False):
        """Parse a comma-separated list of SET TRANSACTION characteristics."""
        self._match_text_seq("TRANSACTION")
        characteristics = self._parse_csv(
            lambda: self._parse_var_from_options(self.TRANSACTION_CHARACTERISTICS)
        )
        return self.expression(
            exp.SetItem,
            expressions=characteristics,
            kind="TRANSACTION",
            **{"global": global_},
        )
Parser consumes a list of tokens produced by the sqlglot.tokens.Tokenizer
and produces
a parsed syntax tree.
Arguments:
- error_level: the desired error level. Default: ErrorLevel.RAISE
- error_message_context: determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 50.
- index_offset: Index offset for arrays, e.g. ARRAY[0] vs ARRAY[1] as the head of a list. Default: 0
- alias_post_tablesample: If the table alias comes after tablesample. Default: False
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
- null_ordering: Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
Inherited Members
class Generator(generator.Generator):
    """SQL generator for the MySQL dialect."""

    LOCKING_READS_SUPPORTED = True
    NULL_ORDERING_SUPPORTED = False

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,  # type: ignore
        exp.CurrentDate: no_paren_current_date_sql,
        exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
        exp.ILike: no_ilike_sql,
        exp.TableSample: no_tablesample_sql,
        exp.TryCast: no_trycast_sql,
        exp.DateAdd: _date_add_sql("ADD"),
        exp.DateSub: _date_add_sql("SUB"),
        exp.DateTrunc: _date_trunc_sql,
        # GROUP_CONCAT falls back to a literal ',' separator when the
        # expression carries none.
        exp.GroupConcat: lambda self, e: f"""GROUP_CONCAT({self.sql(e, "this")} SEPARATOR {self.sql(e, "separator") or "','"})""",
        exp.StrToDate: _str_to_date_sql,
        exp.StrToTime: _str_to_date_sql,
        exp.Trim: _trim_sql,
        # <=> is MySQL's NULL-safe equality operator.
        exp.NullSafeEQ: lambda self, e: self.binary(e, "<=>"),
        exp.NullSafeNEQ: lambda self, e: self.not_sql(self.binary(e, "<=>")),
        exp.StrPosition: strposition_to_locate_sql,
    }

    # Drop the base-class mappings for these types so MySQL's native type
    # names are emitted as-is.
    TYPE_MAPPING = generator.Generator.TYPE_MAPPING.copy()
    TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMTEXT)
    TYPE_MAPPING.pop(exp.DataType.Type.LONGTEXT)
    TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMBLOB)
    TYPE_MAPPING.pop(exp.DataType.Type.LONGBLOB)

    def show_sql(self, expression):
        """Render a MySQL SHOW statement from an exp.Show node.

        Builds each optional clause independently (empty string when the
        corresponding arg is absent) and concatenates them in the fixed
        order of MySQL's SHOW grammar.
        """
        this = f" {expression.name}"
        full = " FULL" if expression.args.get("full") else ""
        global_ = " GLOBAL" if expression.args.get("global") else ""

        # SHOW COLUMNS/INDEX name their target with FROM, SHOW GRANTS with FOR.
        target = self.sql(expression, "target")
        target = f" {target}" if target else ""
        if expression.name in {"COLUMNS", "INDEX"}:
            target = f" FROM{target}"
        elif expression.name == "GRANTS":
            target = f" FOR{target}"

        db = self._prefixed_sql("FROM", expression, "db")

        like = self._prefixed_sql("LIKE", expression, "like")
        where = self.sql(expression, "where")

        types = self.expressions(expression, key="types")
        types = f" {types}" if types else types
        query = self._prefixed_sql("FOR QUERY", expression, "query")

        # SHOW PROFILE has explicit OFFSET/LIMIT clauses; all other variants
        # use the old-style "LIMIT [offset,] count" form.
        if expression.name == "PROFILE":
            offset = self._prefixed_sql("OFFSET", expression, "offset")
            limit = self._prefixed_sql("LIMIT", expression, "limit")
        else:
            offset = ""
            limit = self._oldstyle_limit_sql(expression)

        log = self._prefixed_sql("IN", expression, "log")
        position = self._prefixed_sql("FROM", expression, "position")

        channel = self._prefixed_sql("FOR CHANNEL", expression, "channel")

        # SHOW ENGINE requires a trailing MUTEX or STATUS; STATUS is the default.
        if expression.name == "ENGINE":
            mutex_or_status = " MUTEX" if expression.args.get("mutex") else " STATUS"
        else:
            mutex_or_status = ""

        return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"

    def _prefixed_sql(self, prefix, expression, arg):
        """Render `arg` preceded by `prefix`, or "" when the arg is absent."""
        sql = self.sql(expression, arg)
        if not sql:
            return ""
        return f" {prefix} {sql}"

    def _oldstyle_limit_sql(self, expression):
        """Render old-style " LIMIT [offset, ]limit", or "" when no limit is set."""
        limit = self.sql(expression, "limit")
        offset = self.sql(expression, "offset")
        if limit:
            limit_offset = f"{offset}, {limit}" if offset else limit
            return f" LIMIT {limit_offset}"
        return ""

    def setitem_sql(self, expression):
        """Render one SET item: [GLOBAL ][kind ]this expressions[ COLLATE c]."""
        kind = self.sql(expression, "kind")
        kind = f"{kind} " if kind else ""
        this = self.sql(expression, "this")
        expressions = self.expressions(expression)
        collate = self.sql(expression, "collate")
        collate = f" COLLATE {collate}" if collate else ""
        global_ = "GLOBAL " if expression.args.get("global") else ""
        return f"{global_}{kind}{this}{expressions}{collate}"

    def set_sql(self, expression):
        """Render a SET statement from its comma-separated items."""
        return f"SET {self.expressions(expression)}"
Generator interprets the given syntax tree and produces a SQL string as an output.
Arguments:
- time_mapping (dict): the dictionary of custom time mappings in which the key represents a python time format and the output the target time format
- time_trie (trie): a trie of the time_mapping keys
- pretty (bool): if set to True the returned string will be formatted. Default: False.
- quote_start (str): specifies which starting character to use to delimit quotes. Default: '.
- quote_end (str): specifies which ending character to use to delimit quotes. Default: '.
- identifier_start (str): specifies which starting character to use to delimit identifiers. Default: ".
- identifier_end (str): specifies which ending character to use to delimit identifiers. Default: ".
- identify (bool): if set to True all identifiers will be delimited by the corresponding character.
- normalize (bool): if set to True, all identifiers will be lowercased.
- string_escape (str): specifies a string escape character. Default: '.
- identifier_escape (str): specifies an identifier escape character. Default: ".
- pad (int): determines padding in a formatted string. Default: 2.
- indent (int): determines the size of indentation in a formatted string. Default: 4.
- unnest_column_only (bool): if true unnest table aliases are considered only as column aliases
- normalize_functions (str): normalize function names, "upper", "lower", or None. Default: "upper"
- alias_post_tablesample (bool): if the table alias comes after tablesample Default: False
- unsupported_level (ErrorLevel): determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- null_ordering (str): Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
- max_unsupported (int): Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma (bool): if the comma is leading or trailing in select statements. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether or not to preserve comments in the output SQL code. Default: True
def
show_sql(self, expression):
def show_sql(self, expression):
    """Render a MySQL SHOW statement from an exp.Show node.

    Builds each optional clause independently (empty string when the
    corresponding arg is absent) and concatenates them in the fixed order
    of MySQL's SHOW grammar.
    """
    this = f" {expression.name}"
    full = " FULL" if expression.args.get("full") else ""
    global_ = " GLOBAL" if expression.args.get("global") else ""

    # SHOW COLUMNS/INDEX name their target with FROM, SHOW GRANTS with FOR.
    target = self.sql(expression, "target")
    target = f" {target}" if target else ""
    if expression.name in {"COLUMNS", "INDEX"}:
        target = f" FROM{target}"
    elif expression.name == "GRANTS":
        target = f" FOR{target}"

    db = self._prefixed_sql("FROM", expression, "db")

    like = self._prefixed_sql("LIKE", expression, "like")
    where = self.sql(expression, "where")

    types = self.expressions(expression, key="types")
    types = f" {types}" if types else types
    query = self._prefixed_sql("FOR QUERY", expression, "query")

    # SHOW PROFILE has explicit OFFSET/LIMIT clauses; all other variants
    # use the old-style "LIMIT [offset,] count" form.
    if expression.name == "PROFILE":
        offset = self._prefixed_sql("OFFSET", expression, "offset")
        limit = self._prefixed_sql("LIMIT", expression, "limit")
    else:
        offset = ""
        limit = self._oldstyle_limit_sql(expression)

    log = self._prefixed_sql("IN", expression, "log")
    position = self._prefixed_sql("FROM", expression, "position")

    channel = self._prefixed_sql("FOR CHANNEL", expression, "channel")

    # SHOW ENGINE requires a trailing MUTEX or STATUS; STATUS is the default.
    if expression.name == "ENGINE":
        mutex_or_status = " MUTEX" if expression.args.get("mutex") else " STATUS"
    else:
        mutex_or_status = ""

    return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"
def
setitem_sql(self, expression):
def setitem_sql(self, expression):
    """Render one SET item: [GLOBAL ][kind ]this expressions[ COLLATE collate]."""
    pieces = []
    if expression.args.get("global"):
        pieces.append("GLOBAL ")
    kind = self.sql(expression, "kind")
    if kind:
        pieces.append(f"{kind} ")
    pieces.append(self.sql(expression, "this"))
    pieces.append(self.expressions(expression))
    collate = self.sql(expression, "collate")
    if collate:
        pieces.append(f" COLLATE {collate}")
    return "".join(pieces)
Inherited Members
- sqlglot.generator.Generator
- Generator
- generate
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_sql
- columndef_sql
- columnconstraint_sql
- autoincrementcolumnconstraint_sql
- checkcolumnconstraint_sql
- commentcolumnconstraint_sql
- collatecolumnconstraint_sql
- encodecolumnconstraint_sql
- defaultcolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- create_sql
- describe_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- datatype_sql
- directory_sql
- delete_sql
- drop_sql
- except_sql
- except_op
- fetch_sql
- filter_sql
- hint_sql
- index_sql
- identifier_sql
- national_sql
- partition_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- afterjournalproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- insert_sql
- intersect_sql
- intersect_op
- introducer_sql
- pseudotype_sql
- rowformatdelimitedproperty_sql
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- group_sql
- having_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- offset_sql
- lock_sql
- literal_sql
- loaddata_sql
- null_sql
- boolean_sql
- order_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognize_sql
- query_modifiers
- select_sql
- schema_sql
- star_sql
- structkwarg_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- union_sql
- union_op
- unnest_sql
- where_sql
- window_sql
- partition_by_sql
- window_spec_sql
- withingroup_sql
- between_sql
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- constraint_sql
- extract_sql
- trim_sql
- concat_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- unique_sql
- if_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- aliases_sql
- attimezone_sql
- add_sql
- and_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- transaction_sql
- commit_sql
- rollback_sql
- altercolumn_sql
- renametable_sql
- altertable_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- intdiv_sql
- dpipe_sql
- div_sql
- distance_sql
- dot_sql
- eq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- is_sql
- like_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- or_sql
- slice_sql
- sub_sql
- trycast_sql
- use_sql
- binary
- function_fallback_sql
- format_args
- text_width
- format_time
- expressions
- op_expressions
- naked_property
- set_operation
- tag_sql
- token_sql
- userdefinedfunction_sql
- userdefinedfunctionkwarg_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql