import unittest

from sqlglot.dialects import BigQuery
from sqlglot.errors import TokenError
from sqlglot.tokens import Tokenizer, TokenType


class TestTokens(unittest.TestCase):
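    # Each comment in the input should be collected on the first token's `comments` list.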
    def test_comment_attachment(self):
        tokenizer = Tokenizer()
        sql_comment = [
            ("/*comment*/ foo", ["comment"]),
            ("/*comment*/ foo --test", ["comment", "test"]),
            ("--comment\nfoo --test", ["comment", "test"]),
            ("foo --comment", ["comment"]),
            ("foo", []),
            ("foo /*comment 1*/ /*comment 2*/", ["comment 1", "comment 2"]),
            ("foo\n-- comment", [" comment"]),
            ("1 /*/2 */", ["/2 "]),
            ("1\n/*comment*/;", ["comment"]),
        ]

        for sql, comment in sql_comment:
            self.assertEqual(tokenizer.tokenize(sql)[0].comments, comment)
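
    # `line`/`col` should point at a token's last character, even when the token
    # spans multiple lines; `start`/`end` are absolute character offsets.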
    def test_token_line_col(self):
        tokens = Tokenizer().tokenize(
            """SELECT /*
line break
*/
'x
 y',
x"""
        )

        self.assertEqual(tokens[0].line, 1)
        self.assertEqual(tokens[0].col, 6)
        self.assertEqual(tokens[1].line, 5)
        self.assertEqual(tokens[1].col, 3)
        self.assertEqual(tokens[2].line, 5)
        self.assertEqual(tokens[2].col, 4)
        self.assertEqual(tokens[3].line, 6)
        self.assertEqual(tokens[3].col, 1)

        tokens = Tokenizer().tokenize("SELECT .")

        self.assertEqual(tokens[1].line, 1)
        self.assertEqual(tokens[1].col, 8)

        self.assertEqual(Tokenizer().tokenize("'''abc'")[0].start, 0)
        self.assertEqual(Tokenizer().tokenize("'''abc'")[0].end, 6)
        self.assertEqual(Tokenizer().tokenize("'abc'")[0].start, 0)
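
    # Command keywords (SHOW, EXECUTE, FETCH) should each tokenize as a single
    # token, stopping at semicolons.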
    def test_command(self):
        tokens = Tokenizer().tokenize("SHOW;")
        self.assertEqual(tokens[0].token_type, TokenType.SHOW)
        self.assertEqual(tokens[1].token_type, TokenType.SEMICOLON)

        tokens = Tokenizer().tokenize("EXECUTE")
        self.assertEqual(tokens[0].token_type, TokenType.EXECUTE)
        self.assertEqual(len(tokens), 1)

        tokens = Tokenizer().tokenize("FETCH;SHOW;")
        self.assertEqual(tokens[0].token_type, TokenType.FETCH)
        self.assertEqual(tokens[1].token_type, TokenType.SEMICOLON)
        self.assertEqual(tokens[2].token_type, TokenType.SHOW)
        self.assertEqual(tokens[3].token_type, TokenType.SEMICOLON)
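
    # An unterminated block comment should raise a TokenError that quotes the
    # offending chunk of SQL.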
    def test_error_msg(self):
        with self.assertRaisesRegex(TokenError, "Error tokenizing 'select /'"):
            Tokenizer().tokenize("select /*")

    def test_jinja(self):
        # Check that {#, #} are treated as token delimiters, even though BigQuery overrides COMMENTS
        tokenizer = BigQuery.Tokenizer()

        tokens = tokenizer.tokenize(
            """
            SELECT
               {{ x }},
               {{- x -}},
               {# it's a comment #}
               {% for x in y -%}
                  a {{+ b }}
               {% endfor %};
            """
        )

        tokens = [(token.token_type, token.text) for token in tokens]

        self.assertEqual(
            tokens,
            [
                (TokenType.SELECT, "SELECT"),
                (TokenType.L_BRACE, "{"),
                (TokenType.L_BRACE, "{"),
                (TokenType.VAR, "x"),
                (TokenType.R_BRACE, "}"),
                (TokenType.R_BRACE, "}"),
                (TokenType.COMMA, ","),
                (TokenType.BLOCK_START, "{{-"),
                (TokenType.VAR, "x"),
                (TokenType.BLOCK_END, "-}}"),
                (TokenType.COMMA, ","),
                (TokenType.BLOCK_START, "{%"),
                (TokenType.FOR, "for"),
                (TokenType.VAR, "x"),
                (TokenType.IN, "in"),
                (TokenType.VAR, "y"),
                (TokenType.BLOCK_END, "-%}"),
                (TokenType.VAR, "a"),
                (TokenType.BLOCK_START, "{{+"),
                (TokenType.VAR, "b"),
                (TokenType.R_BRACE, "}"),
                (TokenType.R_BRACE, "}"),
                (TokenType.BLOCK_START, "{%"),
                (TokenType.VAR, "endfor"),
                (TokenType.BLOCK_END, "%}"),
                (TokenType.SEMICOLON, ";"),
            ],
        )
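
        # The quotes around 'x' terminate and reopen the SQL string, so the
        # jinja expression is split around the quoted variable.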
        tokens = tokenizer.tokenize("""'{{ var('x') }}'""")
        tokens = [(token.token_type, token.text) for token in tokens]

        self.assertEqual(
            tokens,
            [
                (TokenType.STRING, "{{ var("),
                (TokenType.VAR, "x"),
                (TokenType.STRING, ") }}"),
            ],
        )