Diffstat (limited to 'sqlglot/tokens.py')
-rw-r--r--  sqlglot/tokens.py  19
1 file changed, 18 insertions(+), 1 deletion(-)
diff --git a/sqlglot/tokens.py b/sqlglot/tokens.py
index 7a50fc3..c81f0db 100644
--- a/sqlglot/tokens.py
+++ b/sqlglot/tokens.py
@@ -94,7 +94,8 @@ class TokenType(AutoName):
SMALLMONEY = auto()
ROWVERSION = auto()
IMAGE = auto()
- SQL_VARIANT = auto()
+ VARIANT = auto()
+ OBJECT = auto()

# keywords
ADD_FILE = auto()
@@ -177,6 +178,7 @@ class TokenType(AutoName):
IS = auto()
ISNULL = auto()
JOIN = auto()
+ LANGUAGE = auto()
LATERAL = auto()
LAZY = auto()
LEADING = auto()
@@ -185,6 +187,7 @@ class TokenType(AutoName):
LIMIT = auto()
LOCATION = auto()
MAP = auto()
+ MATERIALIZED = auto()
MOD = auto()
NATURAL = auto()
NEXT = auto()
@@ -208,6 +211,7 @@ class TokenType(AutoName):
PARTITION_BY = auto()
PARTITIONED_BY = auto()
PERCENT = auto()
+ PIVOT = auto()
PLACEHOLDER = auto()
PRECEDING = auto()
PRIMARY_KEY = auto()
@@ -219,12 +223,14 @@ class TokenType(AutoName):
REPLACE = auto()
RESPECT_NULLS = auto()
REFERENCES = auto()
+ RETURNS = auto()
RIGHT = auto()
RLIKE = auto()
ROLLUP = auto()
ROW = auto()
ROWS = auto()
SCHEMA_COMMENT = auto()
+ SEED = auto()
SELECT = auto()
SEPARATOR = auto()
SET = auto()
@@ -246,6 +252,7 @@ class TokenType(AutoName):
UNCACHE = auto()
UNION = auto()
UNNEST = auto()
+ UNPIVOT = auto()
UPDATE = auto()
USE = auto()
USING = auto()
@@ -440,6 +447,7 @@ class Tokenizer(metaclass=_Tokenizer):
"FULL": TokenType.FULL,
"FUNCTION": TokenType.FUNCTION,
"FOLLOWING": TokenType.FOLLOWING,
+ "FOR": TokenType.FOR,
"FOREIGN KEY": TokenType.FOREIGN_KEY,
"FORMAT": TokenType.FORMAT,
"FROM": TokenType.FROM,
@@ -459,6 +467,7 @@ class Tokenizer(metaclass=_Tokenizer):
"IS": TokenType.IS,
"ISNULL": TokenType.ISNULL,
"JOIN": TokenType.JOIN,
+ "LANGUAGE": TokenType.LANGUAGE,
"LATERAL": TokenType.LATERAL,
"LAZY": TokenType.LAZY,
"LEADING": TokenType.LEADING,
@@ -466,6 +475,7 @@ class Tokenizer(metaclass=_Tokenizer):
"LIKE": TokenType.LIKE,
"LIMIT": TokenType.LIMIT,
"LOCATION": TokenType.LOCATION,
+ "MATERIALIZED": TokenType.MATERIALIZED,
"NATURAL": TokenType.NATURAL,
"NEXT": TokenType.NEXT,
"NO ACTION": TokenType.NO_ACTION,
@@ -473,6 +483,7 @@ class Tokenizer(metaclass=_Tokenizer):
"NULL": TokenType.NULL,
"NULLS FIRST": TokenType.NULLS_FIRST,
"NULLS LAST": TokenType.NULLS_LAST,
+ "OBJECT": TokenType.OBJECT,
"OFFSET": TokenType.OFFSET,
"ON": TokenType.ON,
"ONLY": TokenType.ONLY,
@@ -488,7 +499,9 @@ class Tokenizer(metaclass=_Tokenizer):
"PARTITION": TokenType.PARTITION,
"PARTITION BY": TokenType.PARTITION_BY,
"PARTITIONED BY": TokenType.PARTITIONED_BY,
+ "PARTITIONED_BY": TokenType.PARTITIONED_BY,
"PERCENT": TokenType.PERCENT,
+ "PIVOT": TokenType.PIVOT,
"PRECEDING": TokenType.PRECEDING,
"PRIMARY KEY": TokenType.PRIMARY_KEY,
"RANGE": TokenType.RANGE,
@@ -497,11 +510,13 @@ class Tokenizer(metaclass=_Tokenizer):
"REPLACE": TokenType.REPLACE,
"RESPECT NULLS": TokenType.RESPECT_NULLS,
"REFERENCES": TokenType.REFERENCES,
+ "RETURNS": TokenType.RETURNS,
"RIGHT": TokenType.RIGHT,
"RLIKE": TokenType.RLIKE,
"ROLLUP": TokenType.ROLLUP,
"ROW": TokenType.ROW,
"ROWS": TokenType.ROWS,
+ "SEED": TokenType.SEED,
"SELECT": TokenType.SELECT,
"SET": TokenType.SET,
"SHOW": TokenType.SHOW,
@@ -520,6 +535,7 @@ class Tokenizer(metaclass=_Tokenizer):
"TRUNCATE": TokenType.TRUNCATE,
"UNBOUNDED": TokenType.UNBOUNDED,
"UNION": TokenType.UNION,
+ "UNPIVOT": TokenType.UNPIVOT,
"UNNEST": TokenType.UNNEST,
"UPDATE": TokenType.UPDATE,
"USE": TokenType.USE,
@@ -577,6 +593,7 @@ class Tokenizer(metaclass=_Tokenizer):
"DATETIME": TokenType.DATETIME,
"UNIQUE": TokenType.UNIQUE,
"STRUCT": TokenType.STRUCT,
+ "VARIANT": TokenType.VARIANT,
}

WHITE_SPACE = {
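
Below is a minimal sketch (not part of the change above) of how the new keywords could be exercised through the Tokenizer defined in this file. It assumes Tokenizer().tokenize() returns Token objects exposing token_type and text, as in this version of sqlglot/tokens.py; the sample SQL strings are made up for illustration.

# Sketch only: exercising the keyword mappings added in this diff.
from sqlglot.tokens import Tokenizer, TokenType

tokenizer = Tokenizer()

# PIVOT, UNPIVOT and FOR should now resolve to dedicated token types
# instead of falling through to generic identifier tokens.
tokens = tokenizer.tokenize(
    "SELECT * FROM sales PIVOT (SUM(amount) FOR region IN ('EU', 'US'))"
)
assert any(t.token_type == TokenType.PIVOT for t in tokens)
assert any(t.token_type == TokenType.FOR for t in tokens)

# VARIANT and OBJECT replace the old SQL_VARIANT type token.
for token in tokenizer.tokenize("CREATE TABLE t (a VARIANT, b OBJECT)"):
    print(token.token_type, token.text)

Before this change, words like PIVOT, UNPIVOT, VARIANT and OBJECT would have surfaced as generic identifier tokens, so a downstream parser could not distinguish them from ordinary column or table names.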