diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-06-02 23:59:40 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-06-02 23:59:46 +0000 |
commit | 20739a12c39121a9e7ad3c9a2469ec5a6876199d (patch) | |
tree | c000de91c59fd29b2d9beecf9f93b84e69727f37 /tests/test_tokens.py | |
parent | Releasing debian version 12.2.0-1. (diff) | |
download | sqlglot-20739a12c39121a9e7ad3c9a2469ec5a6876199d.tar.xz sqlglot-20739a12c39121a9e7ad3c9a2469ec5a6876199d.zip |
Merging upstream version 15.0.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/test_tokens.py')
-rw-r--r-- | tests/test_tokens.py | 21 |
1 files changed, 17 insertions, 4 deletions
diff --git a/tests/test_tokens.py b/tests/test_tokens.py
index f70d70e..30af34f 100644
--- a/tests/test_tokens.py
+++ b/tests/test_tokens.py
@@ -20,7 +20,7 @@ class TestTokens(unittest.TestCase):
         for sql, comment in sql_comment:
             self.assertEqual(tokenizer.tokenize(sql)[0].comments, comment)
 
-    def test_token_line(self):
+    def test_token_line_col(self):
         tokens = Tokenizer().tokenize(
             """SELECT /*
 line break
@@ -30,10 +30,23 @@ line break
 x"""
         )
 
+        self.assertEqual(tokens[0].line, 1)
+        self.assertEqual(tokens[0].col, 6)
         self.assertEqual(tokens[1].line, 5)
         self.assertEqual(tokens[1].col, 3)
-        self.assertEqual(tokens[-1].line, 6)
-        self.assertEqual(tokens[-1].col, 1)
+        self.assertEqual(tokens[2].line, 5)
+        self.assertEqual(tokens[2].col, 4)
+        self.assertEqual(tokens[3].line, 6)
+        self.assertEqual(tokens[3].col, 1)
+
+        tokens = Tokenizer().tokenize("SELECT .")
+
+        self.assertEqual(tokens[1].line, 1)
+        self.assertEqual(tokens[1].col, 8)
+
+        self.assertEqual(Tokenizer().tokenize("'''abc'")[0].start, 0)
+        self.assertEqual(Tokenizer().tokenize("'''abc'")[0].end, 6)
+        self.assertEqual(Tokenizer().tokenize("'abc'")[0].start, 0)
 
     def test_command(self):
         tokens = Tokenizer().tokenize("SHOW;")
@@ -51,7 +64,7 @@ x"""
         self.assertEqual(tokens[3].token_type, TokenType.SEMICOLON)
 
     def test_error_msg(self):
-        with self.assertRaisesRegex(ValueError, "Error tokenizing 'select.*"):
+        with self.assertRaisesRegex(ValueError, "Error tokenizing 'select /'"):
             Tokenizer().tokenize("select /*")
 
     def test_jinja(self):