Diffstat (limited to 'tests/lsp_tests/lsp_tutil.py')
-rw-r--r--   tests/lsp_tests/lsp_tutil.py   92
1 file changed, 87 insertions(+), 5 deletions(-)
diff --git a/tests/lsp_tests/lsp_tutil.py b/tests/lsp_tests/lsp_tutil.py
index 0843f79..bc0fa91 100644
--- a/tests/lsp_tests/lsp_tutil.py
+++ b/tests/lsp_tests/lsp_tutil.py
@@ -1,16 +1,52 @@
-from typing import Tuple, Union
+import dataclasses
+from typing import Tuple, Union, FrozenSet, Optional, List
+
+from debputy.lsp.lsp_features import SEMANTIC_TOKENS_LEGEND
+from debputy.util import grouper

try:
-    from pygls.server import LanguageServer
    from lsprotocol.types import (
        TextDocumentItem,
        Position,
+        Range,
+        SemanticTokens,
    )
    from debputy.lsp.debputy_ls import DebputyLanguageServer
except ImportError:
    pass


+@dataclasses.dataclass(slots=True, frozen=True)
+class ResolvedSemanticToken:
+    range: "Range"
+    token_name: str
+    modifiers: FrozenSet[str] = frozenset()
+
+
+def resolved_semantic_token(
+    line_no: int,
+    col_start: int,
+    token_len: int,
+    token_type: str,
+    *,
+    token_modifiers: FrozenSet[str] = frozenset(),
+) -> ResolvedSemanticToken:
+    return ResolvedSemanticToken(
+        Range(
+            Position(
+                line_no,
+                col_start,
+            ),
+            Position(
+                line_no,
+                col_start + token_len,
+            ),
+        ),
+        token_type,
+        token_modifiers,
+    )
+
+
def _locate_cursor(text: str) -> Tuple[str, "Position"]:
    lines = text.splitlines(keepends=True)
    for line_no in range(len(lines)):
@@ -27,12 +63,27 @@ def _locate_cursor(text: str) -> Tuple[str, "Position"]:


def put_doc_with_cursor(
-    ls: Union["LanguageServer", "DebputyLanguageServer"],
+    ls: "DebputyLanguageServer",
    uri: str,
    language_id: str,
    content: str,
) -> "Position":
    cleaned_content, cursor_pos = _locate_cursor(content)
+    put_doc_no_cursor(
+        ls,
+        uri,
+        language_id,
+        cleaned_content,
+    )
+    return cursor_pos
+
+
+def put_doc_no_cursor(
+    ls: "DebputyLanguageServer",
+    uri: str,
+    language_id: str,
+    content: str,
+) -> None:
    doc_version = 1
    existing = ls.workspace.text_documents.get(uri)
    if existing is not None:
@@ -42,7 +93,38 @@ def put_doc_with_cursor(
            uri,
            language_id,
            doc_version,
-            cleaned_content,
+            content,
        )
    )
-    return cursor_pos
+
+
+def resolve_semantic_tokens(
+    token_result: Optional["SemanticTokens"],
+) -> Optional[List[ResolvedSemanticToken]]:
+    if token_result is None:
+        return None
+    assert (len(token_result.data) % 5) == 0
+    current_line = 0
+    current_col = 0
+    resolved_tokens = []
+    token_types = SEMANTIC_TOKENS_LEGEND.token_types
+    for token_data in grouper(token_result.data, 5, incomplete="strict"):
+        line_delta, col_start_delta, token_len, token_code, modifier_codes = token_data
+        if line_delta:
+            current_col = 0
+        current_line += line_delta
+        current_col += col_start_delta
+        assert (
+            not modifier_codes
+        ), "TODO: Modifiers not supported (no modifiers defined)"
+
+        resolved_tokens.append(
+            resolved_semantic_token(
+                current_line,
+                current_col,
+                token_len,
+                token_types[token_code],
+            ),
+        )
+
+    return resolved_tokens
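
Illustration only (not part of the patch): resolve_semantic_tokens() decodes the flat LSP semantic-token array, which is a sequence of 5-tuples (deltaLine, deltaStartChar, length, tokenType, tokenModifiers), into absolute positions. A hypothetical test using the new helpers might look like the sketch below; the import path and the assumption that SEMANTIC_TOKENS_LEGEND.token_types[0] is "keyword" are made up for the example.

from lsprotocol.types import SemanticTokens

from lsp_tests.lsp_tutil import (  # assumed import path for the test helpers
    resolve_semantic_tokens,
    resolved_semantic_token,
)


def test_resolve_semantic_tokens_sketch() -> None:
    # Hand-encoded payload: a 6-char token at line 0 col 0, a 5-char token at
    # line 0 col 8, and a 6-char token at line 2 col 0. All modifier codes are
    # 0, since resolve_semantic_tokens() asserts that no modifiers are used.
    raw = SemanticTokens(data=[0, 0, 6, 0, 0, 0, 8, 5, 0, 0, 2, 0, 6, 0, 0])
    resolved = resolve_semantic_tokens(raw)
    # Assumes (purely for illustration) that token type 0 in
    # SEMANTIC_TOKENS_LEGEND is "keyword".
    assert resolved == [
        resolved_semantic_token(0, 0, 6, "keyword"),
        resolved_semantic_token(0, 8, 5, "keyword"),
        resolved_semantic_token(2, 0, 6, "keyword"),
    ]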