author     Daniel Baumann <daniel.baumann@progress-linux.org>    2022-12-12 15:42:33 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>    2022-12-12 15:42:33 +0000
commit     579e404567dfff42e64325a8c79f03ac627ea341 (patch)
tree       12d101aa5d1b70a69132e5cbd3307741c00d097f    /tests/test_tokens.py
parent     Adding upstream version 10.1.3. (diff)
Adding upstream version 10.2.6.    (upstream/10.2.6)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/test_tokens.py')
-rw-r--r--    tests/test_tokens.py    47
1 file changed, 46 insertions(+), 1 deletion(-)
diff --git a/tests/test_tokens.py b/tests/test_tokens.py
index 1d1b966..1376849 100644
--- a/tests/test_tokens.py
+++ b/tests/test_tokens.py
@@ -1,6 +1,6 @@
 import unittest
 
-from sqlglot.tokens import Tokenizer
+from sqlglot.tokens import Tokenizer, TokenType
 
 
 class TestTokens(unittest.TestCase):
@@ -17,3 +17,48 @@ class TestTokens(unittest.TestCase):
 
         for sql, comment in sql_comment:
             self.assertEqual(tokenizer.tokenize(sql)[0].comments, comment)
+
+    def test_jinja(self):
+        tokenizer = Tokenizer()
+
+        tokens = tokenizer.tokenize(
+            """
+            SELECT
+               {{ x }},
+               {{- x -}},
+               {% for x in y -%}
+                  a {{+ b }}
+               {% endfor %};
+            """
+        )
+
+        tokens = [(token.token_type, token.text) for token in tokens]
+
+        self.assertEqual(
+            tokens,
+            [
+                (TokenType.SELECT, "SELECT"),
+                (TokenType.BLOCK_START, "{{"),
+                (TokenType.VAR, "x"),
+                (TokenType.BLOCK_END, "}}"),
+                (TokenType.COMMA, ","),
+                (TokenType.BLOCK_START, "{{-"),
+                (TokenType.VAR, "x"),
+                (TokenType.BLOCK_END, "-}}"),
+                (TokenType.COMMA, ","),
+                (TokenType.BLOCK_START, "{%"),
+                (TokenType.FOR, "for"),
+                (TokenType.VAR, "x"),
+                (TokenType.IN, "in"),
+                (TokenType.VAR, "y"),
+                (TokenType.BLOCK_END, "-%}"),
+                (TokenType.VAR, "a"),
+                (TokenType.BLOCK_START, "{{+"),
+                (TokenType.VAR, "b"),
+                (TokenType.BLOCK_END, "}}"),
+                (TokenType.BLOCK_START, "{%"),
+                (TokenType.VAR, "endfor"),
+                (TokenType.BLOCK_END, "%}"),
+                (TokenType.SEMICOLON, ";"),
+            ],
+        )
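
Note: the new test documents how the tokenizer treats Jinja templating: delimiters such as {{, {{-, {% and -%} are emitted as BLOCK_START/BLOCK_END tokens instead of breaking the lexer. A minimal standalone sketch of the same behavior, assuming sqlglot 10.2.x with Tokenizer and TokenType imported from sqlglot.tokens exactly as in the patched test:

    from sqlglot.tokens import Tokenizer, TokenType

    # Tokenize a Jinja-templated statement; the template delimiters come back
    # as BLOCK_START / BLOCK_END tokens alongside the regular SQL tokens.
    tokens = Tokenizer().tokenize("SELECT {{ x }}")

    # Mirrors the leading assertions in test_jinja above.
    assert [(t.token_type, t.text) for t in tokens] == [
        (TokenType.SELECT, "SELECT"),
        (TokenType.BLOCK_START, "{{"),
        (TokenType.VAR, "x"),
        (TokenType.BLOCK_END, "}}"),
    ]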