author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-02-08 04:14:30 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-02-08 04:14:30 +0000 |
commit | 99980f928b5b7be237d108266072e51aa3bb354e (patch) | |
tree | ce6fff00ea2b834bdbe3d84dcac90df1617d4245 /sqlglot/parser.py | |
parent | Adding upstream version 10.6.0. (diff) | |
download | sqlglot-99980f928b5b7be237d108266072e51aa3bb354e.tar.xz sqlglot-99980f928b5b7be237d108266072e51aa3bb354e.zip |
Adding upstream version 10.6.3. (tag: upstream/10.6.3)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'sqlglot/parser.py')
-rw-r--r-- | sqlglot/parser.py | 122 |
1 file changed, 101 insertions, 21 deletions
```diff
diff --git a/sqlglot/parser.py b/sqlglot/parser.py
index 6229105..e2b2c54 100644
--- a/sqlglot/parser.py
+++ b/sqlglot/parser.py
@@ -80,6 +80,7 @@ class Parser(metaclass=_Parser):
             length=exp.Literal.number(10),
         ),
         "VAR_MAP": parse_var_map,
+        "IFNULL": exp.Coalesce.from_arg_list,
     }
 
     NO_PAREN_FUNCTIONS = {
@@ -567,6 +568,8 @@ class Parser(metaclass=_Parser):
             default=self._prev.text.upper() == "DEFAULT"
         ),
         "BLOCKCOMPRESSION": lambda self: self._parse_blockcompression(),
+        "ALGORITHM": lambda self: self._parse_property_assignment(exp.AlgorithmProperty),
+        "DEFINER": lambda self: self._parse_definer(),
     }
 
     CONSTRAINT_PARSERS = {
@@ -608,6 +611,7 @@ class Parser(metaclass=_Parser):
         "order": lambda self: self._parse_order(),
         "limit": lambda self: self._parse_limit(),
         "offset": lambda self: self._parse_offset(),
+        "lock": lambda self: self._parse_lock(),
     }
 
     SHOW_PARSERS: t.Dict[str, t.Callable] = {}
@@ -850,7 +854,7 @@ class Parser(metaclass=_Parser):
         self.raise_error(error_message)
 
     def _find_sql(self, start: Token, end: Token) -> str:
-        return self.sql[self._find_token(start) : self._find_token(end)]
+        return self.sql[self._find_token(start) : self._find_token(end) + len(end.text)]
 
     def _find_token(self, token: Token) -> int:
         line = 1
@@ -901,6 +905,7 @@ class Parser(metaclass=_Parser):
         return expression
 
     def _parse_drop(self, default_kind: t.Optional[str] = None) -> t.Optional[exp.Expression]:
+        start = self._prev
         temporary = self._match(TokenType.TEMPORARY)
         materialized = self._match(TokenType.MATERIALIZED)
         kind = self._match_set(self.CREATABLES) and self._prev.text
@@ -908,8 +913,7 @@ class Parser(metaclass=_Parser):
             if default_kind:
                 kind = default_kind
             else:
-                self.raise_error(f"Expected {self.CREATABLES}")
-                return None
+                return self._parse_as_command(start)
 
         return self.expression(
             exp.Drop,
@@ -929,6 +933,7 @@ class Parser(metaclass=_Parser):
         )
 
     def _parse_create(self) -> t.Optional[exp.Expression]:
+        start = self._prev
         replace = self._match_pair(TokenType.OR, TokenType.REPLACE)
         set_ = self._match(TokenType.SET)  # Teradata
         multiset = self._match_text_seq("MULTISET")  # Teradata
@@ -943,16 +948,19 @@ class Parser(metaclass=_Parser):
         if self._match_pair(TokenType.TABLE, TokenType.FUNCTION, advance=False):
             self._match(TokenType.TABLE)
 
+        properties = None
         create_token = self._match_set(self.CREATABLES) and self._prev
 
         if not create_token:
-            self.raise_error(f"Expected {self.CREATABLES}")
-            return None
+            properties = self._parse_properties()
+            create_token = self._match_set(self.CREATABLES) and self._prev
+
+            if not properties or not create_token:
+                return self._parse_as_command(start)
 
         exists = self._parse_exists(not_=True)
         this = None
         expression = None
-        properties = None
         data = None
         statistics = None
         no_primary_index = None
@@ -1006,6 +1014,14 @@ class Parser(metaclass=_Parser):
                 indexes = []
                 while True:
                     index = self._parse_create_table_index()
+
+                    # post index PARTITION BY property
+                    if self._match(TokenType.PARTITION_BY, advance=False):
+                        if properties:
+                            properties.expressions.append(self._parse_property())
+                        else:
+                            properties = self._parse_properties()
+
                     if not index:
                         break
                     else:
@@ -1040,6 +1056,9 @@ class Parser(metaclass=_Parser):
         )
 
     def _parse_property_before(self) -> t.Optional[exp.Expression]:
+        self._match(TokenType.COMMA)
+
+        # parsers look to _prev for no/dual/default, so need to consume first
         self._match_text_seq("NO")
         self._match_text_seq("DUAL")
         self._match_text_seq("DEFAULT")
@@ -1059,6 +1078,9 @@ class Parser(metaclass=_Parser):
         if self._match_pair(TokenType.COMPOUND, TokenType.SORTKEY):
             return self._parse_sortkey(compound=True)
 
+        if self._match_text_seq("SQL", "SECURITY"):
+            return self.expression(exp.SqlSecurityProperty, definer=self._match_text_seq("DEFINER"))
+
         assignment = self._match_pair(
             TokenType.VAR, TokenType.EQ, advance=False
         ) or self._match_pair(TokenType.STRING, TokenType.EQ, advance=False)
@@ -1083,7 +1105,6 @@ class Parser(metaclass=_Parser):
 
         while True:
             if before:
-                self._match(TokenType.COMMA)
                 identified_property = self._parse_property_before()
             else:
                 identified_property = self._parse_property()
@@ -1094,7 +1115,7 @@ class Parser(metaclass=_Parser):
                 properties.append(p)
 
         if properties:
-            return self.expression(exp.Properties, expressions=properties, before=before)
+            return self.expression(exp.Properties, expressions=properties)
 
         return None
 
@@ -1118,6 +1139,19 @@ class Parser(metaclass=_Parser):
 
         return self._parse_withisolatedloading()
 
+    # https://dev.mysql.com/doc/refman/8.0/en/create-view.html
+    def _parse_definer(self) -> t.Optional[exp.Expression]:
+        self._match(TokenType.EQ)
+
+        user = self._parse_id_var()
+        self._match(TokenType.PARAMETER)
+        host = self._parse_id_var() or (self._match(TokenType.MOD) and self._prev.text)
+
+        if not user or not host:
+            return None
+
+        return exp.DefinerProperty(this=f"{user}@{host}")
+
     def _parse_withjournaltable(self) -> exp.Expression:
         self._match_text_seq("WITH", "JOURNAL", "TABLE")
         self._match(TokenType.EQ)
@@ -1695,12 +1729,10 @@ class Parser(metaclass=_Parser):
                    paren += 1
                 if self._curr.token_type == TokenType.R_PAREN:
                     paren -= 1
+                    end = self._prev
                 self._advance()
             if paren > 0:
                 self.raise_error("Expecting )", self._curr)
-            if not self._curr:
-                self.raise_error("Expecting pattern", self._curr)
-            end = self._prev
             pattern = exp.Var(this=self._find_sql(start, end))
         else:
             pattern = None
@@ -2044,9 +2076,16 @@ class Parser(metaclass=_Parser):
         expressions = self._parse_csv(self._parse_conjunction)
         grouping_sets = self._parse_grouping_sets()
 
+        self._match(TokenType.COMMA)
         with_ = self._match(TokenType.WITH)
-        cube = self._match(TokenType.CUBE) and (with_ or self._parse_wrapped_id_vars())
-        rollup = self._match(TokenType.ROLLUP) and (with_ or self._parse_wrapped_id_vars())
+        cube = self._match(TokenType.CUBE) and (
+            with_ or self._parse_wrapped_csv(self._parse_column)
+        )
+
+        self._match(TokenType.COMMA)
+        rollup = self._match(TokenType.ROLLUP) and (
+            with_ or self._parse_wrapped_csv(self._parse_column)
+        )
 
         return self.expression(
             exp.Group,
@@ -2149,6 +2188,14 @@ class Parser(metaclass=_Parser):
         self._match_set((TokenType.ROW, TokenType.ROWS))
         return self.expression(exp.Offset, this=this, expression=count)
 
+    def _parse_lock(self) -> t.Optional[exp.Expression]:
+        if self._match_text_seq("FOR", "UPDATE"):
+            return self.expression(exp.Lock, update=True)
+        if self._match_text_seq("FOR", "SHARE"):
+            return self.expression(exp.Lock, update=False)
+
+        return None
+
     def _parse_set_operations(self, this: t.Optional[exp.Expression]) -> t.Optional[exp.Expression]:
         if not self._match_set(self.SET_OPERATIONS):
             return this
@@ -2330,12 +2377,21 @@ class Parser(metaclass=_Parser):
             maybe_func = True
 
         if not nested and self._match_pair(TokenType.L_BRACKET, TokenType.R_BRACKET):
-            return exp.DataType(
+            this = exp.DataType(
                 this=exp.DataType.Type.ARRAY,
                 expressions=[exp.DataType.build(type_token.value, expressions=expressions)],
                 nested=True,
             )
 
+            while self._match_pair(TokenType.L_BRACKET, TokenType.R_BRACKET):
+                this = exp.DataType(
+                    this=exp.DataType.Type.ARRAY,
+                    expressions=[this],
+                    nested=True,
+                )
+
+            return this
+
         if self._match(TokenType.L_BRACKET):
             self._retreat(index)
             return None
@@ -2430,7 +2486,12 @@ class Parser(metaclass=_Parser):
                     self.raise_error("Expected type")
             elif op:
                 self._advance()
-                field = exp.Literal.string(self._prev.text)
+                value = self._prev.text
+                field = (
+                    exp.Literal.number(value)
+                    if self._prev.token_type == TokenType.NUMBER
+                    else exp.Literal.string(value)
+                )
             else:
                 field = self._parse_star() or self._parse_function() or self._parse_id_var()
 
@@ -2752,7 +2813,23 @@ class Parser(metaclass=_Parser):
             if not self._curr:
                 break
 
-            if self._match_text_seq("NOT", "ENFORCED"):
+            if self._match(TokenType.ON):
+                action = None
+                on = self._advance_any() and self._prev.text
+
+                if self._match(TokenType.NO_ACTION):
+                    action = "NO ACTION"
+                elif self._match(TokenType.CASCADE):
+                    action = "CASCADE"
+                elif self._match_pair(TokenType.SET, TokenType.NULL):
+                    action = "SET NULL"
+                elif self._match_pair(TokenType.SET, TokenType.DEFAULT):
+                    action = "SET DEFAULT"
+                else:
+                    self.raise_error("Invalid key constraint")
+
+                options.append(f"ON {on} {action}")
+            elif self._match_text_seq("NOT", "ENFORCED"):
                 options.append("NOT ENFORCED")
             elif self._match_text_seq("DEFERRABLE"):
                 options.append("DEFERRABLE")
@@ -2762,10 +2839,6 @@ class Parser(metaclass=_Parser):
                 options.append("NORELY")
             elif self._match_text_seq("MATCH", "FULL"):
                 options.append("MATCH FULL")
-            elif self._match_text_seq("ON", "UPDATE", "NO ACTION"):
-                options.append("ON UPDATE NO ACTION")
-            elif self._match_text_seq("ON", "DELETE", "NO ACTION"):
-                options.append("ON DELETE NO ACTION")
             else:
                 break
 
@@ -3158,7 +3231,9 @@ class Parser(metaclass=_Parser):
                 prefix += self._prev.text
 
         if (any_token and self._advance_any()) or self._match_set(tokens or self.ID_VAR_TOKENS):
-            return exp.Identifier(this=prefix + self._prev.text, quoted=False)
+            quoted = self._prev.token_type == TokenType.STRING
+            return exp.Identifier(this=prefix + self._prev.text, quoted=quoted)
+
         return None
 
     def _parse_string(self) -> t.Optional[exp.Expression]:
@@ -3486,6 +3561,11 @@ class Parser(metaclass=_Parser):
     def _parse_set(self) -> exp.Expression:
         return self.expression(exp.Set, expressions=self._parse_csv(self._parse_set_item))
 
+    def _parse_as_command(self, start: Token) -> exp.Command:
+        while self._curr:
+            self._advance()
+        return exp.Command(this=self._find_sql(start, self._prev))
+
     def _find_parser(
         self, parsers: t.Dict[str, t.Callable], trie: t.Dict
     ) -> t.Optional[t.Callable]:
```
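The snippets below are illustrative sketches against sqlglot's public API (`parse_one`, `Expression.find`, `Expression.sql`); they are not part of the commit, and exact output text may vary by version and dialect. First, the new `"IFNULL": exp.Coalesce.from_arg_list` entry in `FUNCTIONS` means `IFNULL` calls should parse into `exp.Coalesce` nodes rather than anonymous functions:

```python
import sqlglot
from sqlglot import exp

# IFNULL(a, b) should now come back as a Coalesce node, so dialect
# generators can render it with their own null-handling function.
tree = sqlglot.parse_one("SELECT IFNULL(a, b) FROM t")
coalesce = tree.find(exp.Coalesce)
print(coalesce is not None and coalesce.sql())  # expected: COALESCE(a, b)
```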
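The `ALGORITHM`, `DEFINER`, and `SQL SECURITY` additions (see `_parse_definer` and the linked MySQL CREATE VIEW docs) let MySQL-style view headers parse into properties. A hedged sketch; the exact statement and the `read="mysql"` flag are assumptions, not taken from the commit:

```python
import sqlglot
from sqlglot import exp

# The definer and security clauses should surface as properties on the
# CREATE expression instead of tripping the parser on unknown keywords.
sql = "CREATE ALGORITHM=MERGE DEFINER=`admin`@`localhost` SQL SECURITY DEFINER VIEW v AS SELECT 1"
tree = sqlglot.parse_one(sql, read="mysql")
print(tree.find(exp.DefinerProperty))      # should be a DefinerProperty capturing user@host
print(tree.find(exp.SqlSecurityProperty))  # should be a SqlSecurityProperty with definer set
```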
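The new `lock` entry in the query-modifier table and `_parse_lock` cover `FOR UPDATE` / `FOR SHARE`. A minimal sketch:

```python
import sqlglot
from sqlglot import exp

# FOR UPDATE / FOR SHARE should now be kept as an exp.Lock modifier on the
# select, with update=True for FOR UPDATE and update=False for FOR SHARE.
select = sqlglot.parse_one("SELECT * FROM accounts WHERE id = 1 FOR UPDATE")
lock = select.find(exp.Lock)
print(lock is not None and lock.args.get("update"))  # expected: True
```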
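In `_parse_group`, `CUBE` and `ROLLUP` arguments are now parsed with `_parse_wrapped_csv(self._parse_column)` instead of `_parse_wrapped_id_vars`, and a separating comma is tolerated, so column expressions inside `ROLLUP (...)` should work. The query below is an assumed example:

```python
import sqlglot

# ROLLUP over qualified columns; previously the wrapped list only accepted
# bare identifiers.
q = sqlglot.parse_one("SELECT t.a, SUM(t.b) FROM t GROUP BY ROLLUP (t.a, t.b)")
print(q.sql())
```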
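`_parse_types` now loops over trailing `[]` pairs, nesting `ARRAY` data types for multi-dimensional arrays rather than stopping after one level. A sketch; the generated type text depends on the target dialect:

```python
import sqlglot
from sqlglot import exp

# INT[][] should build an ARRAY-of-ARRAY DataType instead of a single
# ARRAY type that ignores the second pair of brackets.
cast = sqlglot.parse_one("SELECT CAST(x AS INT[][]) FROM t").find(exp.Cast)
print(cast.args["to"].sql())
```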
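The reference-option loop now recognizes `ON DELETE` / `ON UPDATE` followed by `CASCADE`, `SET NULL`, `SET DEFAULT`, or `NO ACTION`, replacing the two hard-coded `NO ACTION` branches. Illustrative DDL (the table and column names are made up):

```python
import sqlglot

# The referential actions should be kept as constraint options and survive
# a parse/generate round trip.
ddl = (
    "CREATE TABLE child ("
    "parent_id INT REFERENCES parent (id) ON DELETE CASCADE ON UPDATE SET NULL"
    ")"
)
print(sqlglot.parse_one(ddl).sql())
```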
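Finally, `_parse_as_command` turns `CREATE`/`DROP` statements the parser does not model into a raw `exp.Command` (using `_find_sql`, which now includes the final token's text) instead of raising. `DROP TRIGGER` below is only an assumed example of an unmodeled kind:

```python
import sqlglot
from sqlglot import exp

# An object kind outside CREATABLES should come back as a generic Command
# wrapping the original statement text, not as a ParseError.
stmt = sqlglot.parse_one("DROP TRIGGER my_trigger")
print(isinstance(stmt, exp.Command), stmt.sql())
```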