author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-04 11:33:32 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-04 11:33:32 +0000
commit     1f403ad2197fc7442409f434ee574f3e6b46fb73 (patch)
tree       0299c6dd11d5edfa918a29b6456bc1875f1d288c /tests/test_regexlexer.py
parent     Initial commit. (diff)
download   pygments-1f403ad2197fc7442409f434ee574f3e6b46fb73.tar.xz
           pygments-1f403ad2197fc7442409f434ee574f3e6b46fb73.zip

Adding upstream version 2.14.0+dfsg. (upstream/2.14.0+dfsg, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests/test_regexlexer.py')
-rw-r--r--  tests/test_regexlexer.py  65
1 file changed, 65 insertions, 0 deletions
diff --git a/tests/test_regexlexer.py b/tests/test_regexlexer.py
new file mode 100644
index 0000000..1b9639f
--- /dev/null
+++ b/tests/test_regexlexer.py
@@ -0,0 +1,65 @@
+"""
+ Pygments regex lexer tests
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import pytest
+
+from pygments.token import Text, Whitespace
+from pygments.lexer import RegexLexer, default
+
+
+@pytest.fixture(scope='module')
+def lexer():
+ yield MyLexer()
+
+
+class MyLexer(RegexLexer):
+ """Test tuple state transitions including #pop."""
+ tokens = {
+ 'root': [
+ ('a', Text.Root, 'rag'),
+ ('e', Text.Root),
+ ('#', Text.Root, '#pop'),
+ ('@', Text.Root, ('#pop', '#pop')),
+ default(('beer', 'beer'))
+ ],
+ 'beer': [
+ ('d', Text.Beer, ('#pop', '#pop')),
+ ],
+ 'rag': [
+ ('b', Text.Rag, '#push'),
+ ('c', Text.Rag, ('#pop', 'beer')),
+ ],
+ }
+
+
+def test_tuple(lexer):
+ toks = list(lexer.get_tokens_unprocessed('abcde'))
+ assert toks == [
+ (0, Text.Root, 'a'), (1, Text.Rag, 'b'), (2, Text.Rag, 'c'),
+ (3, Text.Beer, 'd'), (4, Text.Root, 'e')]
+
+
+def test_multiline(lexer):
+ toks = list(lexer.get_tokens_unprocessed('a\ne'))
+ assert toks == [
+ (0, Text.Root, 'a'), (1, Whitespace, '\n'), (2, Text.Root, 'e')]
+
+
+def test_default(lexer):
+ toks = list(lexer.get_tokens_unprocessed('d'))
+ assert toks == [(0, Text.Beer, 'd')]
+
+
+def test_pop_empty_regular(lexer):
+ toks = list(lexer.get_tokens_unprocessed('#e'))
+ assert toks == [(0, Text.Root, '#'), (1, Text.Root, 'e')]
+
+
+def test_pop_empty_tuple(lexer):
+ toks = list(lexer.get_tokens_unprocessed('@e'))
+ assert toks == [(0, Text.Root, '@'), (1, Text.Root, 'e')]
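
The file above exercises the stack handling in Pygments' RegexLexer: when the new state of a rule is a tuple, it is applied entry by entry, so matching '@' with ('#pop', '#pop') pops two states, '#push' re-enters the current state, and default(('beer', 'beer')) pushes 'beer' twice when no other rule matches at the current position. As an illustrative sketch of the same mechanism (not part of the committed file; StackDemoLexer and its rules are hypothetical):

from pygments.lexer import RegexLexer
from pygments.token import Text

class StackDemoLexer(RegexLexer):
    """Hypothetical lexer mirroring the tuple state transitions tested above."""
    tokens = {
        'root': [
            ('x', Text, ('a', 'b')),   # push 'a', then 'b' (top of stack is 'b')
        ],
        'b': [
            ('y', Text, '#pop'),       # pop back to 'a'
        ],
        'a': [
            ('z', Text, '#pop'),       # pop back to 'root'
        ],
    }

for pos, tok, val in StackDemoLexer().get_tokens_unprocessed('xyz'):
    print(pos, tok, val)               # (index, token, value) for 'x', 'y', 'z', all Text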