| author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-05-04 17:35:20 +0000 |
|---|---|---|
| committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-05-04 17:35:20 +0000 |
| commit | e106bf94eff07d9a59771d9ccc4406421e18ab64 (patch) | |
| tree | edb6545500e39df9c67aa918a6125bffc8ec1aee /src/prompt_toolkit/lexers/base.py | |
| parent | Initial commit. (diff) | |
| download | prompt-toolkit-0a3e0a12a3f6453f8e5b1cffb2f7e1e619420f44.tar.xz prompt-toolkit-0a3e0a12a3f6453f8e5b1cffb2f7e1e619420f44.zip | |
Adding upstream version 3.0.36.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/prompt_toolkit/lexers/base.py')
| -rw-r--r-- | src/prompt_toolkit/lexers/base.py | 82 |
1 files changed, 82 insertions, 0 deletions
diff --git a/src/prompt_toolkit/lexers/base.py b/src/prompt_toolkit/lexers/base.py
new file mode 100644
index 0000000..dd9f245
--- /dev/null
+++ b/src/prompt_toolkit/lexers/base.py
@@ -0,0 +1,82 @@
+"""
+Base classes for prompt_toolkit lexers.
+"""
+from abc import ABCMeta, abstractmethod
+from typing import Callable, Hashable, Optional
+
+from prompt_toolkit.document import Document
+from prompt_toolkit.formatted_text.base import StyleAndTextTuples
+
+__all__ = [
+    "Lexer",
+    "SimpleLexer",
+    "DynamicLexer",
+]
+
+
+class Lexer(metaclass=ABCMeta):
+    """
+    Base class for all lexers.
+    """
+
+    @abstractmethod
+    def lex_document(self, document: Document) -> Callable[[int], StyleAndTextTuples]:
+        """
+        Takes a :class:`~prompt_toolkit.document.Document` and returns a
+        callable that takes a line number and returns a list of
+        ``(style_str, text)`` tuples for that line.
+
+        XXX: Note that in the past, this was supposed to return a list
+        of ``(Token, text)`` tuples, just like a Pygments lexer.
+        """
+
+    def invalidation_hash(self) -> Hashable:
+        """
+        When this changes, `lex_document` could give a different output.
+        (Only used for `DynamicLexer`.)
+        """
+        return id(self)
+
+
+class SimpleLexer(Lexer):
+    """
+    Lexer that doesn't do any tokenizing and returns the whole input as one
+    token.
+
+    :param style: The style string for this lexer.
+    """
+
+    def __init__(self, style: str = "") -> None:
+        self.style = style
+
+    def lex_document(self, document: Document) -> Callable[[int], StyleAndTextTuples]:
+        lines = document.lines
+
+        def get_line(lineno: int) -> StyleAndTextTuples:
+            "Return the tokens for the given line."
+            try:
+                return [(self.style, lines[lineno])]
+            except IndexError:
+                return []
+
+        return get_line
+
+
+class DynamicLexer(Lexer):
+    """
+    Lexer class that can dynamically returns any Lexer.
+
+    :param get_lexer: Callable that returns a :class:`.Lexer` instance.
+    """
+
+    def __init__(self, get_lexer: Callable[[], Optional[Lexer]]) -> None:
+        self.get_lexer = get_lexer
+        self._dummy = SimpleLexer()
+
+    def lex_document(self, document: Document) -> Callable[[int], StyleAndTextTuples]:
+        lexer = self.get_lexer() or self._dummy
+        return lexer.lex_document(document)
+
+    def invalidation_hash(self) -> Hashable:
+        lexer = self.get_lexer() or self._dummy
+        return id(lexer)
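For readers skimming this diff, here is a minimal usage sketch of the three classes added above. It is not part of the commit: the `CommentLexer` subclass and the `use_comments` flag are illustrative assumptions, and the import from `prompt_toolkit.lexers` assumes the package's `__init__` re-exports these names, as it does in upstream 3.0.36.

```python
# Usage sketch only -- not part of the commit above.
# Assumes prompt_toolkit 3.0.36 is installed and that
# prompt_toolkit.lexers re-exports Lexer, SimpleLexer and DynamicLexer.
from typing import Callable

from prompt_toolkit.document import Document
from prompt_toolkit.formatted_text.base import StyleAndTextTuples
from prompt_toolkit.lexers import DynamicLexer, Lexer, SimpleLexer


class CommentLexer(Lexer):
    """Hypothetical lexer: style lines starting with '#' differently."""

    def lex_document(self, document: Document) -> Callable[[int], StyleAndTextTuples]:
        lines = document.lines

        def get_line(lineno: int) -> StyleAndTextTuples:
            try:
                line = lines[lineno]
            except IndexError:
                return []
            style = "class:comment" if line.lstrip().startswith("#") else ""
            return [(style, line)]

        return get_line


doc = Document("# heading\nplain text\n")

# SimpleLexer: every line comes back as a single fragment with one style string.
simple = SimpleLexer(style="class:plain")
get_line = simple.lex_document(doc)
print(get_line(0))  # [('class:plain', '# heading')]

# DynamicLexer: get_lexer() is re-evaluated on each lex_document() call,
# so the active lexer can be swapped at runtime (falling back to SimpleLexer
# when it returns None).
use_comments = True
dynamic = DynamicLexer(lambda: CommentLexer() if use_comments else None)
get_line = dynamic.lex_document(doc)
print(get_line(0))  # [('class:comment', '# heading')]
print(get_line(1))  # [('', 'plain text')]
```

The sketch relies only on behavior visible in the diff: `SimpleLexer.lex_document` returns each line with a fixed style string, while `DynamicLexer` delegates to whatever `get_lexer()` returns and uses its own dummy `SimpleLexer` when that callable yields `None`.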