author    | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-02-12 10:03:37 +0000
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-02-12 10:03:37 +0000
commit    | 24e839c04c39d1f4423b267c371e8e5b5bc33867 (patch)
tree      | 2395ec71424fe1ff783e0dc7ca0f9b4e16def8a9 /sqlglot/parser.py
parent    | Adding upstream version 10.6.3. (diff)
Adding upstream version 11.0.1. (tag: upstream/11.0.1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'sqlglot/parser.py')
-rw-r--r-- | sqlglot/parser.py | 48
1 file changed, 36 insertions(+), 12 deletions(-)
```diff
diff --git a/sqlglot/parser.py b/sqlglot/parser.py
index e2b2c54..579c2ce 100644
--- a/sqlglot/parser.py
+++ b/sqlglot/parser.py
@@ -194,6 +194,7 @@ class Parser(metaclass=_Parser):
         TokenType.INTERVAL,
         TokenType.LAZY,
         TokenType.LEADING,
+        TokenType.LEFT,
         TokenType.LOCAL,
         TokenType.MATERIALIZED,
         TokenType.MERGE,
@@ -208,6 +209,7 @@ class Parser(metaclass=_Parser):
         TokenType.PRECEDING,
         TokenType.RANGE,
         TokenType.REFERENCES,
+        TokenType.RIGHT,
         TokenType.ROW,
         TokenType.ROWS,
         TokenType.SCHEMA,
@@ -237,8 +239,10 @@ class Parser(metaclass=_Parser):
 
     TABLE_ALIAS_TOKENS = ID_VAR_TOKENS - {
         TokenType.APPLY,
+        TokenType.LEFT,
         TokenType.NATURAL,
         TokenType.OFFSET,
+        TokenType.RIGHT,
         TokenType.WINDOW,
     }
 
@@ -258,6 +262,8 @@ class Parser(metaclass=_Parser):
         TokenType.IDENTIFIER,
         TokenType.INDEX,
         TokenType.ISNULL,
+        TokenType.ILIKE,
+        TokenType.LIKE,
         TokenType.MERGE,
         TokenType.OFFSET,
         TokenType.PRIMARY_KEY,
@@ -971,13 +977,14 @@ class Parser(metaclass=_Parser):
 
         if create_token.token_type in (TokenType.FUNCTION, TokenType.PROCEDURE):
             this = self._parse_user_defined_function(kind=create_token.token_type)
             properties = self._parse_properties()
-            if self._match(TokenType.ALIAS):
-                begin = self._match(TokenType.BEGIN)
-                return_ = self._match_text_seq("RETURN")
-                expression = self._parse_statement()
-                if return_:
-                    expression = self.expression(exp.Return, this=expression)
+            self._match(TokenType.ALIAS)
+            begin = self._match(TokenType.BEGIN)
+            return_ = self._match_text_seq("RETURN")
+            expression = self._parse_statement()
+
+            if return_:
+                expression = self.expression(exp.Return, this=expression)
         elif create_token.token_type == TokenType.INDEX:
             this = self._parse_index()
         elif create_token.token_type in (
@@ -2163,7 +2170,9 @@ class Parser(metaclass=_Parser):
     ) -> t.Optional[exp.Expression]:
         if self._match(TokenType.TOP if top else TokenType.LIMIT):
             limit_paren = self._match(TokenType.L_PAREN)
-            limit_exp = self.expression(exp.Limit, this=this, expression=self._parse_number())
+            limit_exp = self.expression(
+                exp.Limit, this=this, expression=self._parse_number() if top else self._parse_term()
+            )
 
             if limit_paren:
                 self._match_r_paren()
@@ -2740,8 +2749,23 @@ class Parser(metaclass=_Parser):
 
         kind: exp.Expression
 
-        if self._match(TokenType.AUTO_INCREMENT):
-            kind = exp.AutoIncrementColumnConstraint()
+        if self._match_set((TokenType.AUTO_INCREMENT, TokenType.IDENTITY)):
+            start = None
+            increment = None
+
+            if self._match(TokenType.L_PAREN, advance=False):
+                args = self._parse_wrapped_csv(self._parse_bitwise)
+                start = seq_get(args, 0)
+                increment = seq_get(args, 1)
+            elif self._match_text_seq("START"):
+                start = self._parse_bitwise()
+                self._match_text_seq("INCREMENT")
+                increment = self._parse_bitwise()
+
+            if start and increment:
+                kind = exp.GeneratedAsIdentityColumnConstraint(start=start, increment=increment)
+            else:
+                kind = exp.AutoIncrementColumnConstraint()
         elif self._match(TokenType.CHECK):
             constraint = self._parse_wrapped(self._parse_conjunction)
             kind = self.expression(exp.CheckColumnConstraint, this=constraint)
@@ -3294,8 +3318,8 @@ class Parser(metaclass=_Parser):
         if not self._match(TokenType.EXCEPT):
             return None
         if self._match(TokenType.L_PAREN, advance=False):
-            return self._parse_wrapped_id_vars()
-        return self._parse_csv(self._parse_id_var)
+            return self._parse_wrapped_csv(self._parse_column)
+        return self._parse_csv(self._parse_column)
 
     def _parse_replace(self) -> t.Optional[t.List[t.Optional[exp.Expression]]]:
         if not self._match(TokenType.REPLACE):
```
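The largest hunk above teaches the column-constraint parser to treat `IDENTITY(start, increment)` (or `START ... INCREMENT ...`) as an `exp.GeneratedAsIdentityColumnConstraint`, falling back to a plain `exp.AutoIncrementColumnConstraint` when both values are not given, and the `LIMIT` hunk switches from `_parse_number` to `_parse_term` so that arbitrary term expressions are accepted after `LIMIT`. A minimal sketch of how this surfaces through sqlglot's public API; the SQL strings and the choice of the `tsql` dialect are illustrative assumptions, not part of the patch:

```python
import sqlglot
from sqlglot import exp

# Illustrative T-SQL: IDENTITY(1, 1) should be picked up by the new
# AUTO_INCREMENT/IDENTITY branch and, since both a start and an
# increment are present, become a GeneratedAsIdentityColumnConstraint.
create = sqlglot.parse_one(
    "CREATE TABLE t (id INT IDENTITY(1, 1), name VARCHAR(50))",
    read="tsql",
)
for constraint in create.find_all(exp.GeneratedAsIdentityColumnConstraint):
    print(constraint.args.get("start"), constraint.args.get("increment"))

# Illustrative LIMIT with a term rather than a bare number: with
# _parse_term in place, "LIMIT 1 + 2" should parse instead of erroring.
select = sqlglot.parse_one("SELECT * FROM t LIMIT 1 + 2")
limit = select.args.get("limit")
print(limit.sql() if limit else None)
```

When only one or neither identity value is present, the `else` arm still produces the old `AutoIncrementColumnConstraint`, so existing `AUTO_INCREMENT` handling is unchanged.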
```diff
@@ -3442,7 +3466,7 @@ class Parser(metaclass=_Parser):
 
     def _parse_alter(self) -> t.Optional[exp.Expression]:
         if not self._match(TokenType.TABLE):
-            return None
+            return self._parse_as_command(self._prev)
 
         exists = self._parse_exists()
         this = self._parse_table(schema=True)
```
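The final hunk changes `_parse_alter` so that an `ALTER` statement whose target is not `TABLE` is no longer discarded as `None` but kept as a generic command node via `_parse_as_command`. A rough sketch of the observable effect, assuming this path is reached through `parse_one`; the exact statement below is an illustrative assumption:

```python
import sqlglot
from sqlglot import exp

# Anything other than ALTER TABLE should now come back as a generic
# Command wrapping the original token stream rather than parsing to None.
stmt = sqlglot.parse_one("ALTER VIEW v AS SELECT 1 AS c")
print(isinstance(stmt, exp.Command))  # expected to be True with this patch
print(stmt.sql())                     # should roughly round-trip the statement
```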