diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2022-10-15 13:52:53 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2022-10-15 13:52:53 +0000 |
commit | 97d3673ec2d668050912aa6aea1816885ca6c5ab (patch) | |
tree | f391e30e039a3d22368e9696e171f759e104c765 /sqlglot/tokens.py | |
parent | Adding upstream version 6.3.1. (diff) | |
download | sqlglot-97d3673ec2d668050912aa6aea1816885ca6c5ab.tar.xz sqlglot-97d3673ec2d668050912aa6aea1816885ca6c5ab.zip |
Adding upstream version 7.1.3. (tag: upstream/7.1.3)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'sqlglot/tokens.py')
-rw-r--r-- | sqlglot/tokens.py | 15 |
1 files changed, 13 insertions, 2 deletions
diff --git a/sqlglot/tokens.py b/sqlglot/tokens.py index 17c038c..fc8e6e7 100644 --- a/sqlglot/tokens.py +++ b/sqlglot/tokens.py @@ -123,6 +123,7 @@ class TokenType(AutoName): CLUSTER_BY = auto() COLLATE = auto() COMMENT = auto() + COMMENT_ON = auto() COMMIT = auto() CONSTRAINT = auto() CREATE = auto() @@ -133,13 +134,14 @@ class TokenType(AutoName): CURRENT_ROW = auto() CURRENT_TIME = auto() CURRENT_TIMESTAMP = auto() - DIV = auto() DEFAULT = auto() DELETE = auto() DESC = auto() + DESCRIBE = auto() DETERMINISTIC = auto() DISTINCT = auto() DISTRIBUTE_BY = auto() + DIV = auto() DROP = auto() ELSE = auto() END = auto() @@ -189,6 +191,8 @@ class TokenType(AutoName): LEFT = auto() LIKE = auto() LIMIT = auto() + LOAD_DATA = auto() + LOCAL = auto() LOCATION = auto() MAP = auto() MATERIALIZED = auto() @@ -196,6 +200,7 @@ class TokenType(AutoName): NATURAL = auto() NEXT = auto() NO_ACTION = auto() + NOTNULL = auto() NULL = auto() NULLS_FIRST = auto() NULLS_LAST = auto() @@ -436,13 +441,14 @@ class Tokenizer(metaclass=_Tokenizer): "CURRENT_DATE": TokenType.CURRENT_DATE, "CURRENT ROW": TokenType.CURRENT_ROW, "CURRENT_TIMESTAMP": TokenType.CURRENT_TIMESTAMP, - "DIV": TokenType.DIV, "DEFAULT": TokenType.DEFAULT, "DELETE": TokenType.DELETE, "DESC": TokenType.DESC, + "DESCRIBE": TokenType.DESCRIBE, "DETERMINISTIC": TokenType.DETERMINISTIC, "DISTINCT": TokenType.DISTINCT, "DISTRIBUTE BY": TokenType.DISTRIBUTE_BY, + "DIV": TokenType.DIV, "DROP": TokenType.DROP, "ELSE": TokenType.ELSE, "END": TokenType.END, @@ -487,12 +493,15 @@ class Tokenizer(metaclass=_Tokenizer): "LEFT": TokenType.LEFT, "LIKE": TokenType.LIKE, "LIMIT": TokenType.LIMIT, + "LOAD DATA": TokenType.LOAD_DATA, + "LOCAL": TokenType.LOCAL, "LOCATION": TokenType.LOCATION, "MATERIALIZED": TokenType.MATERIALIZED, "NATURAL": TokenType.NATURAL, "NEXT": TokenType.NEXT, "NO ACTION": TokenType.NO_ACTION, "NOT": TokenType.NOT, + "NOTNULL": TokenType.NOTNULL, "NULL": TokenType.NULL, "NULLS FIRST": TokenType.NULLS_FIRST, "NULLS LAST": TokenType.NULLS_LAST, @@ -530,6 +539,7 @@ class Tokenizer(metaclass=_Tokenizer): "ROLLUP": TokenType.ROLLUP, "ROW": TokenType.ROW, "ROWS": TokenType.ROWS, + "SCHEMA": TokenType.SCHEMA, "SEED": TokenType.SEED, "SELECT": TokenType.SELECT, "SEMI": TokenType.SEMI, @@ -629,6 +639,7 @@ class Tokenizer(metaclass=_Tokenizer): TokenType.ANALYZE, TokenType.BEGIN, TokenType.CALL, + TokenType.COMMENT_ON, TokenType.COMMIT, TokenType.EXPLAIN, TokenType.OPTIMIZE, |