summaryrefslogtreecommitdiffstats
path: root/tests/test_crystal.py
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-04 11:33:32 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-04 11:33:32 +0000
commit1f403ad2197fc7442409f434ee574f3e6b46fb73 (patch)
tree0299c6dd11d5edfa918a29b6456bc1875f1d288c /tests/test_crystal.py
parentInitial commit. (diff)
downloadpygments-1f403ad2197fc7442409f434ee574f3e6b46fb73.tar.xz
pygments-1f403ad2197fc7442409f434ee574f3e6b46fb73.zip
Adding upstream version 2.14.0+dfsg. (tags: upstream/2.14.0+dfsg, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/test_crystal.py')
-rw-r--r--tests/test_crystal.py80
1 file changed, 80 insertions, 0 deletions
diff --git a/tests/test_crystal.py b/tests/test_crystal.py
new file mode 100644
index 0000000..962d9e5
--- /dev/null
+++ b/tests/test_crystal.py
@@ -0,0 +1,80 @@
+"""
+ Basic CrystalLexer Test
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import pytest
+
+from pygments.token import Text, String, Number, Punctuation, Error, Whitespace
+from pygments.lexers import CrystalLexer
+
+
+@pytest.fixture(scope='module')
+def lexer():
+ yield CrystalLexer()
+
+
+def test_numbers(lexer):
+ for kind, testset in [
+ (Number.Integer, '0 1 1_000_000 1u8 11231231231121312i64'),
+ (Number.Float, '0.0 1.0_f32 1_f32 0f64 1e+4 1e111 1_234.567_890'),
+ (Number.Bin, '0b1001_0110 0b0u8'),
+ (Number.Oct, '0o17 0o7_i32'),
+ (Number.Hex, '0xdeadBEEF'),
+ ]:
+ for fragment in testset.split():
+ assert list(lexer.get_tokens(fragment + '\n')) == \
+ [(kind, fragment), (Whitespace, '\n')]
+
+ for fragment in '01 0b2 0x129g2 0o12358'.split():
+ assert next(lexer.get_tokens(fragment + '\n'))[0] == Error
+
+
+def test_symbols(lexer):
+ for fragment in [':sym_bol', ':\u3042', ':question?']:
+ assert list(lexer.get_tokens(fragment + '\n')) == \
+ [(String.Symbol, fragment), (Whitespace, '\n')]
+
+ fragment = ':"sym bol"\n'
+ tokens = [
+ (String.Symbol, ':"'),
+ (String.Symbol, 'sym bol'),
+ (String.Symbol, '"'),
+ (Whitespace, '\n'),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_chars(lexer):
+ for fragment in ["'a'", "'я'", "'\\u{1234}'", "'\n'"]:
+ assert list(lexer.get_tokens(fragment + '\n')) == \
+ [(String.Char, fragment), (Whitespace, '\n')]
+ assert next(lexer.get_tokens("'abc'"))[0] == Error
+
+
+def test_string_escapes(lexer):
+ for body in ['\\n', '\\a', '\\xff', '\\u1234', '\\000', '\\u{0}', '\\u{10AfF9}']:
+ fragment = '"a' + body + 'z"\n'
+ assert list(lexer.get_tokens(fragment)) == [
+ (String.Double, '"'),
+ (String.Double, 'a'),
+ (String.Escape, body),
+ (String.Double, 'z'),
+ (String.Double, '"'),
+ (Whitespace, '\n'),
+ ]
+
+
+def test_empty_percent_strings(lexer):
+ for body in ['%()', '%[]', '%{}', '%<>', '%||']:
+ fragment = '(' + body + ')\n'
+ assert list(lexer.get_tokens(fragment)) == [
+ (Punctuation, '('),
+ (String.Other, body[:-1]),
+ (String.Other, body[-1]),
+ (Punctuation, ')'),
+ (Whitespace, '\n'),
+ ]