summaryrefslogtreecommitdiffstats
path: root/src
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-14 16:23:19 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-14 16:23:19 +0000
commit3554c1d4ae60c53c0232d05454ff853dc537096b (patch)
tree9f528439859090d3800a85fc43d13b8bf53bf451 /src
parentAdding debian version 0.1.32. (diff)
downloaddebputy-3554c1d4ae60c53c0232d05454ff853dc537096b.tar.xz
debputy-3554c1d4ae60c53c0232d05454ff853dc537096b.zip
Merging upstream version 0.1.33.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src')
-rw-r--r--src/debputy/dh_migration/migrators_impl.py5
-rw-r--r--src/debputy/linting/lint_impl.py35
-rw-r--r--src/debputy/linting/lint_util.py59
-rw-r--r--src/debputy/lsp/diagnostics.py2
-rw-r--r--src/debputy/lsp/lsp_debian_control.py119
-rw-r--r--src/debputy/lsp/lsp_debian_control_reference_data.py287
-rw-r--r--src/debputy/lsp/lsp_debian_copyright.py31
-rw-r--r--src/debputy/lsp/lsp_debian_patches_series.py454
-rw-r--r--src/debputy/lsp/lsp_debian_tests_control.py31
-rw-r--r--src/debputy/lsp/lsp_features.py8
-rw-r--r--src/debputy/lsp/lsp_generic_deb822.py72
-rw-r--r--src/debputy/lsp/quickfixes.py47
-rw-r--r--src/debputy/lsp/spellchecking.py3
-rw-r--r--src/debputy/lsp/style_prefs.py4
-rw-r--r--src/debputy/lsp/text_util.py44
15 files changed, 988 insertions, 213 deletions
diff --git a/src/debputy/dh_migration/migrators_impl.py b/src/debputy/dh_migration/migrators_impl.py
index 2ceefd5..d68768c 100644
--- a/src/debputy/dh_migration/migrators_impl.py
+++ b/src/debputy/dh_migration/migrators_impl.py
@@ -231,6 +231,11 @@ DH_ADDONS_TO_PLUGINS = {
remove_dh_sequence=False,
must_use_zz_debputy=True,
),
+ "grantlee": DHSequenceMigration(
+ "grantlee",
+ remove_dh_sequence=True,
+ must_use_zz_debputy=True,
+ ),
"numpy3": DHSequenceMigration(
"numpy3",
# The sequence provides (build-time) dependencies that we cannot provide
diff --git a/src/debputy/linting/lint_impl.py b/src/debputy/linting/lint_impl.py
index 81ce0e9..6ccf03d 100644
--- a/src/debputy/linting/lint_impl.py
+++ b/src/debputy/linting/lint_impl.py
@@ -6,24 +6,9 @@ import sys
import textwrap
from typing import Optional, List, Union, NoReturn, Mapping
-from debputy.filesystem_scan import FSROOverlay
-from debputy.lsp.vendoring._deb822_repro import Deb822FileElement
-from debputy.plugin.api import VirtualPath
-from debputy.yaml import MANIFEST_YAML, YAMLError
-from lsprotocol.types import (
- CodeAction,
- Command,
- CodeActionParams,
- CodeActionContext,
- TextDocumentIdentifier,
- TextEdit,
- Position,
- DiagnosticSeverity,
- Diagnostic,
-)
-
from debputy.commands.debputy_cmd.context import CommandContext
from debputy.commands.debputy_cmd.output import _output_styling, OutputStylingBase
+from debputy.filesystem_scan import FSROOverlay
from debputy.linting.lint_util import (
report_diagnostic,
LinterImpl,
@@ -41,6 +26,7 @@ from debputy.lsp.lsp_debian_copyright import (
_reformat_debian_copyright,
)
from debputy.lsp.lsp_debian_debputy_manifest import _lint_debian_debputy_manifest
+from debputy.lsp.lsp_debian_patches_series import _lint_debian_patches_series
from debputy.lsp.lsp_debian_rules import _lint_debian_rules_impl
from debputy.lsp.lsp_debian_tests_control import (
_lint_debian_tests_control,
@@ -59,10 +45,24 @@ from debputy.lsp.text_edit import (
apply_text_edits,
OverLappingTextEditException,
)
+from debputy.lsp.vendoring._deb822_repro import Deb822FileElement
from debputy.packages import SourcePackage, BinaryPackage
+from debputy.plugin.api import VirtualPath
from debputy.plugin.api.feature_set import PluginProvidedFeatureSet
from debputy.util import _warn, _error, _info
-from debputy.yaml.compat import CommentedMap, __all__
+from debputy.yaml import MANIFEST_YAML, YAMLError
+from debputy.yaml.compat import CommentedMap
+from lsprotocol.types import (
+ CodeAction,
+ Command,
+ CodeActionParams,
+ CodeActionContext,
+ TextDocumentIdentifier,
+ TextEdit,
+ Position,
+ DiagnosticSeverity,
+ Diagnostic,
+)
LINTER_FORMATS = {
"debian/changelog": _lint_debian_changelog,
@@ -70,6 +70,7 @@ LINTER_FORMATS = {
"debian/copyright": _lint_debian_copyright,
"debian/debputy.manifest": _lint_debian_debputy_manifest,
"debian/rules": _lint_debian_rules_impl,
+ "debian/patches/series": _lint_debian_patches_series,
"debian/tests/control": _lint_debian_tests_control,
}
diff --git a/src/debputy/linting/lint_util.py b/src/debputy/linting/lint_util.py
index 78e9f9a..745d24c 100644
--- a/src/debputy/linting/lint_util.py
+++ b/src/debputy/linting/lint_util.py
@@ -1,16 +1,24 @@
import dataclasses
import os
-from typing import List, Optional, Callable, Counter, TYPE_CHECKING, Mapping, Sequence
-
-from lsprotocol.types import Position, Range, Diagnostic, DiagnosticSeverity, TextEdit
+from typing import (
+ List,
+ Optional,
+ Callable,
+ Counter,
+ TYPE_CHECKING,
+ Mapping,
+ Sequence,
+ cast,
+)
from debputy.commands.debputy_cmd.output import OutputStylingBase
from debputy.filesystem_scan import VirtualPathBase
+from debputy.lsp.diagnostics import LintSeverity
from debputy.lsp.vendoring._deb822_repro import Deb822FileElement, parse_deb822_file
from debputy.packages import SourcePackage, BinaryPackage
-from debputy.plugin.api import VirtualPath
from debputy.plugin.api.feature_set import PluginProvidedFeatureSet
from debputy.util import _DEFAULT_LOGGER, _warn
+from lsprotocol.types import Position, Range, Diagnostic, DiagnosticSeverity, TextEdit
if TYPE_CHECKING:
from debputy.lsp.text_util import LintCapablePositionCodec
@@ -158,26 +166,26 @@ LINTER_POSITION_CODEC = LinterPositionCodec()
_SEVERITY2TAG = {
- DiagnosticSeverity.Error: lambda fo: fo.colored(
- "error",
+ DiagnosticSeverity.Error: lambda fo, lint_tag=None: fo.colored(
+ lint_tag if lint_tag else "error",
fg="red",
bg="black",
style="bold",
),
- DiagnosticSeverity.Warning: lambda fo: fo.colored(
- "warning",
+ DiagnosticSeverity.Warning: lambda fo, lint_tag=None: fo.colored(
+ lint_tag if lint_tag else "warning",
fg="yellow",
bg="black",
style="bold",
),
- DiagnosticSeverity.Information: lambda fo: fo.colored(
- "informational",
+ DiagnosticSeverity.Information: lambda fo, lint_tag=None: fo.colored(
+ lint_tag if lint_tag else "informational",
fg="blue",
bg="black",
style="bold",
),
- DiagnosticSeverity.Hint: lambda fo: fo.colored(
- "pedantic",
+ DiagnosticSeverity.Hint: lambda fo, lint_tag=None: fo.colored(
+ lint_tag if lint_tag else "pedantic",
fg="green",
bg="black",
style="bold",
@@ -231,12 +239,15 @@ def report_diagnostic(
missing_severity = True
if not auto_fixed:
tag_unresolved = _SEVERITY2TAG.get(severity)
+ lint_tag: Optional[LintSeverity] = None
+ if isinstance(diagnostic.data, dict):
+ lint_tag = cast("LintSeverity", diagnostic.data.get("lint_severity"))
if tag_unresolved is None:
tag_unresolved = _SEVERITY2TAG[DiagnosticSeverity.Warning]
lint_report.diagnostics_without_severity += 1
else:
lint_report.diagnostics_count[severity] += 1
- tag = tag_unresolved(fo)
+ tag = tag_unresolved(fo, lint_tag)
else:
tag = fo.colored(
"auto-fixing",
@@ -264,8 +275,13 @@ def report_diagnostic(
# If it is fixed, there is no reason to show additional context.
lint_report.fixed += 1
return
+ if _is_file_level_diagnostic(
+ lines, start_line, start_position, end_line, end_position
+ ):
+ print(f" File-level diagnostic")
+ return
lines_to_print = _lines_to_print(diagnostic.range)
- if diagnostic.range.end.line > len(lines) or diagnostic.range.start.line < 0:
+ if end_line > len(lines) or start_line < 0:
lint_report.diagnostic_errors += 1
_warn(
"Bug in the underlying linter: The line numbers of the warning does not fit in the file..."
@@ -278,3 +294,18 @@ def report_diagnostic(
for line_no in range(start_line, end_line):
line = _highlight_range(fo, lines[line_no], line_no, diagnostic.range)
print(f" {line_no+1:{line_no_width}}: {line}")
+
+
+def _is_file_level_diagnostic(
+ lines: List[str],
+ start_line: int,
+ start_position: int,
+ end_line: int,
+ end_position: int,
+) -> bool:
+ if start_line != 0 or start_position != 0:
+ return False
+ line_count = len(lines)
+ if end_line + 1 == line_count and end_position == 0:
+ return True
+ return end_line == line_count and line_count and end_position == len(lines[-1])
diff --git a/src/debputy/lsp/diagnostics.py b/src/debputy/lsp/diagnostics.py
index 6e0b88a..5ae7ec5 100644
--- a/src/debputy/lsp/diagnostics.py
+++ b/src/debputy/lsp/diagnostics.py
@@ -1,6 +1,6 @@
from typing import TypedDict, NotRequired, List, Any, Literal, Optional
-LintSeverity = Literal["style"]
+LintSeverity = Literal["spelling"]
class DiagnosticData(TypedDict):
diff --git a/src/debputy/lsp/lsp_debian_control.py b/src/debputy/lsp/lsp_debian_control.py
index 699193c..ce92374 100644
--- a/src/debputy/lsp/lsp_debian_control.py
+++ b/src/debputy/lsp/lsp_debian_control.py
@@ -9,6 +9,7 @@ from typing import (
Mapping,
List,
Dict,
+ Any,
)
from lsprotocol.types import (
@@ -43,6 +44,7 @@ from debputy.lsp.lsp_debian_control_reference_data import (
DctrlFileMetadata,
package_name_to_section,
all_package_relationship_fields,
+ extract_first_value_and_position,
)
from debputy.lsp.lsp_features import (
lint_diagnostics,
@@ -67,6 +69,7 @@ from debputy.lsp.quickfixes import (
range_compatible_with_remove_line_fix,
propose_correct_text_quick_fix,
propose_insert_text_on_line_after_diagnostic_quick_fix,
+ propose_remove_range_quick_fix,
)
from debputy.lsp.spellchecking import default_spellchecker
from debputy.lsp.text_util import (
@@ -82,6 +85,8 @@ from debputy.lsp.vendoring._deb822_repro import (
from debputy.lsp.vendoring._deb822_repro.parsing import (
Deb822KeyValuePairElement,
LIST_SPACE_SEPARATED_INTERPRETATION,
+ Interpretation,
+ Deb822ParsedTokenList,
)
try:
@@ -414,30 +419,46 @@ def _paragraph_representation_field(
return next(iter(paragraph.iter_parts_of_type(Deb822KeyValuePairElement)))
-def _extract_first_value_and_position(
- kvpair: Deb822KeyValuePairElement,
- stanza_pos: "TEPosition",
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
-) -> Tuple[Optional[str], Optional[Range]]:
- kvpair_pos = kvpair.position_in_parent().relative_to(stanza_pos)
- value_element_pos = kvpair.value_element.position_in_parent().relative_to(
- kvpair_pos
- )
- for value_ref in kvpair.interpret_as(
- LIST_SPACE_SEPARATED_INTERPRETATION
- ).iter_value_references():
- v = value_ref.value
- section_value_loc = value_ref.locatable
- value_range_te = section_value_loc.range_in_parent().relative_to(
- value_element_pos
+def _source_package_checks(
+ stanza: Deb822ParagraphElement,
+ stanza_position: "TEPosition",
+ lint_state: LintState,
+ diagnostics: List[Diagnostic],
+) -> None:
+ vcs_fields = {}
+ for kvpair in stanza.iter_parts_of_type(Deb822KeyValuePairElement):
+ name = normalize_dctrl_field_name(kvpair.field_name.lower())
+ if (
+ not name.startswith("vcs-")
+ or name == "vcs-browser"
+ or name not in SOURCE_FIELDS
+ ):
+ continue
+ vcs_fields[name] = kvpair
+
+ if len(vcs_fields) < 2:
+ return
+ for kvpair in vcs_fields.values():
+ kvpair_range_server_units = te_range_to_lsp(
+ kvpair.range_in_parent().relative_to(stanza_position)
)
- section_range_server_units = te_range_to_lsp(value_range_te)
- section_range = position_codec.range_to_client_units(
- lines, section_range_server_units
+ diagnostics.append(
+ Diagnostic(
+ lint_state.position_codec.range_to_client_units(
+ lint_state.lines, kvpair_range_server_units
+ ),
+ f'Multiple Version Control fields defined ("{kvpair.field_name}")',
+ severity=DiagnosticSeverity.Warning,
+ source="debputy",
+ data=DiagnosticData(
+ quickfixes=[
+ propose_remove_range_quick_fix(
+ proposed_title=f'Remove "{kvpair.field_name}"'
+ )
+ ]
+ ),
+ )
)
- return v, section_range
- return None, None
def _binary_package_checks(
@@ -445,8 +466,7 @@ def _binary_package_checks(
stanza_position: "TEPosition",
source_stanza: Deb822ParagraphElement,
representation_field_range: Range,
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
+ lint_state: LintState,
diagnostics: List[Diagnostic],
) -> None:
package_name = stanza.get("Package", "")
@@ -454,11 +474,10 @@ def _binary_package_checks(
section_kvpair = stanza.get_kvpair_element("Section", use_get=True)
section: Optional[str] = None
if section_kvpair is not None:
- section, section_range = _extract_first_value_and_position(
+ section, section_range = extract_first_value_and_position(
section_kvpair,
stanza_position,
- position_codec,
- lines,
+ lint_state,
)
else:
section_range = representation_field_range
@@ -476,11 +495,10 @@ def _binary_package_checks(
)
package_type_range = None
if package_type_kvpair is not None:
- _, package_type_range = _extract_first_value_and_position(
+ _, package_type_range = extract_first_value_and_position(
package_type_kvpair,
stanza_position,
- position_codec,
- lines,
+ lint_state,
)
if package_type_range is None:
package_type_range = representation_field_range
@@ -528,16 +546,15 @@ def _diagnostics_for_paragraph(
other_known_fields: Mapping[str, DctrlKnownField],
is_binary_paragraph: bool,
doc_reference: str,
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
+ lint_state: LintState,
diagnostics: List[Diagnostic],
) -> None:
representation_field = _paragraph_representation_field(stanza)
representation_field_range = representation_field.range_in_parent().relative_to(
stanza_position
)
- representation_field_range = position_codec.range_to_client_units(
- lines,
+ representation_field_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
te_range_to_lsp(representation_field_range),
)
for known_field in known_fields.values():
@@ -563,8 +580,14 @@ def _diagnostics_for_paragraph(
stanza_position,
source_stanza,
representation_field_range,
- position_codec,
- lines,
+ lint_state,
+ diagnostics,
+ )
+ else:
+ _source_package_checks(
+ stanza,
+ stanza_position,
+ lint_state,
diagnostics,
)
@@ -583,8 +606,8 @@ def _diagnostics_for_paragraph(
)
field_position_te = field_range_te.start_pos
field_range_server_units = te_range_to_lsp(field_range_te)
- field_range = position_codec.range_to_client_units(
- lines,
+ field_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
field_range_server_units,
)
field_name_typo_detected = False
@@ -609,8 +632,8 @@ def _diagnostics_for_paragraph(
field_position_te, kvpair.field_token.size()
)
)
- field_range = position_codec.range_to_client_units(
- lines,
+ field_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
token_range_server_units,
)
field_name_typo_detected = True
@@ -659,8 +682,7 @@ def _diagnostics_for_paragraph(
stanza,
stanza_position,
kvpair_position,
- position_codec,
- lines,
+ lint_state,
field_name_typo_reported=field_name_typo_detected,
)
)
@@ -689,8 +711,8 @@ def _diagnostics_for_paragraph(
word_range_server_units = te_range_to_lsp(
TERange.from_position_and_size(word_pos_te, word_range_te)
)
- word_range = position_codec.range_to_client_units(
- lines,
+ word_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
word_range_server_units,
)
diagnostics.append(
@@ -700,10 +722,11 @@ def _diagnostics_for_paragraph(
severity=DiagnosticSeverity.Hint,
source="debputy",
data=DiagnosticData(
+ lint_severity="spelling",
quickfixes=[
propose_correct_text_quick_fix(c)
for c in corrections
- ]
+ ],
),
)
)
@@ -827,9 +850,10 @@ def _scan_for_syntax_errors_and_token_level_diagnostics(
severity=DiagnosticSeverity.Hint,
source="debputy",
data=DiagnosticData(
+ lint_severity="spelling",
quickfixes=[
propose_correct_text_quick_fix(c) for c in corrections
- ]
+ ],
),
)
)
@@ -875,8 +899,7 @@ def _lint_debian_control(
other_known_fields,
is_binary_paragraph,
doc_reference,
- position_codec,
- lines,
+ lint_state,
diagnostics,
)
diff --git a/src/debputy/lsp/lsp_debian_control_reference_data.py b/src/debputy/lsp/lsp_debian_control_reference_data.py
index bd2a43d..5b2e5f3 100644
--- a/src/debputy/lsp/lsp_debian_control_reference_data.py
+++ b/src/debputy/lsp/lsp_debian_control_reference_data.py
@@ -28,7 +28,7 @@ from typing import (
from debputy.filesystem_scan import VirtualPathBase
from debputy.linting.lint_util import LintState
from debputy.lsp.vendoring._deb822_repro.types import TE
-from debian.debian_support import DpkgArchTable
+from debian.debian_support import DpkgArchTable, Version
from lsprotocol.types import (
DiagnosticSeverity,
Diagnostic,
@@ -52,6 +52,7 @@ from debputy.lsp.lsp_reference_keyword import (
from debputy.lsp.quickfixes import (
propose_correct_text_quick_fix,
propose_remove_line_quick_fix,
+ propose_remove_range_quick_fix,
)
from debputy.lsp.text_edit import apply_text_edits
from debputy.lsp.text_util import (
@@ -86,6 +87,7 @@ from debputy.lsp.vendoring._deb822_repro.tokens import (
)
from debputy.lsp.vendoring._deb822_repro.types import FormatterCallback
from debputy.lsp.vendoring.wrap_and_sort import _sort_packages_key
+from debputy.path_matcher import BasenameGlobMatch
from debputy.plugin.api import VirtualPath
from debputy.util import PKGNAME_REGEX, _info
@@ -109,6 +111,8 @@ S = TypeVar("S", bound="StanzaMetadata")
# FIXME: should go into python3-debian
_RE_COMMA = re.compile("([^,]*),([^,]*)")
+_RE_SV = re.compile(r"(\d+[.]\d+[.]\d+)([.]\d+)?")
+CURRENT_STANDARDS_VERSION = Version("4.7.0")
@_value_line_tokenizer
@@ -151,8 +155,7 @@ CustomFieldCheck = Callable[
"TERange",
Deb822ParagraphElement,
"TEPosition",
- "LintCapablePositionCodec",
- List[str],
+ LintState,
],
Iterable[Diagnostic],
]
@@ -389,49 +392,105 @@ def dpkg_arch_and_wildcards() -> FrozenSet[Union[str, Keyword]]:
return frozenset(all_architectures_and_wildcards(dpkg_arch_table._arch2table))
-def _extract_first_value_and_position(
+def extract_first_value_and_position(
kvpair: Deb822KeyValuePairElement,
stanza_pos: "TEPosition",
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
+ lint_state: LintState,
+ *,
+ interpretation: Interpretation[
+ Deb822ParsedTokenList[Any, Any]
+ ] = LIST_SPACE_SEPARATED_INTERPRETATION,
) -> Tuple[Optional[str], Optional[Range]]:
kvpair_pos = kvpair.position_in_parent().relative_to(stanza_pos)
value_element_pos = kvpair.value_element.position_in_parent().relative_to(
kvpair_pos
)
- for value_ref in kvpair.interpret_as(
- LIST_SPACE_SEPARATED_INTERPRETATION
- ).iter_value_references():
+ for value_ref in kvpair.interpret_as(interpretation).iter_value_references():
v = value_ref.value
section_value_loc = value_ref.locatable
value_range_te = section_value_loc.range_in_parent().relative_to(
value_element_pos
)
- value_range_server_units = te_range_to_lsp(value_range_te)
- value_range = position_codec.range_to_client_units(
- lines, value_range_server_units
+ section_range_server_units = te_range_to_lsp(value_range_te)
+ section_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
+ section_range_server_units,
)
- return v, value_range
+ return v, section_range
return None, None
+def _sv_field_validation(
+ _known_field: "F",
+ kvpair: Deb822KeyValuePairElement,
+ _field_range: "TERange",
+ _stanza: Deb822ParagraphElement,
+ stanza_position: "TEPosition",
+ lint_state: LintState,
+) -> Iterable[Diagnostic]:
+ sv_value, sv_value_range = extract_first_value_and_position(
+ kvpair,
+ stanza_position,
+ lint_state,
+ )
+ m = _RE_SV.fullmatch(sv_value)
+ if m is None:
+ yield Diagnostic(
+ sv_value_range,
+ f'Not a valid version. Current version is "{CURRENT_STANDARDS_VERSION}"',
+ severity=DiagnosticSeverity.Warning,
+ source="debputy",
+ )
+ return
+
+ sv_version = Version(sv_value)
+ if sv_version < CURRENT_STANDARDS_VERSION:
+ yield Diagnostic(
+ sv_value_range,
+ f"Latest Standards-Version is {CURRENT_STANDARDS_VERSION}",
+ severity=DiagnosticSeverity.Information,
+ source="debputy",
+ )
+ return
+ extra = m.group(2)
+ if extra:
+ extra_len = lint_state.position_codec.client_num_units(extra)
+ yield Diagnostic(
+ Range(
+ Position(
+ sv_value_range.end.line,
+ sv_value_range.end.character - extra_len,
+ ),
+ sv_value_range.end,
+ ),
+ "Unnecessary version segment. This part of the version is only used for editorial changes",
+ severity=DiagnosticSeverity.Information,
+ source="debputy",
+ data=DiagnosticData(
+ quickfixes=[
+ propose_remove_range_quick_fix(
+ proposed_title="Remove unnecessary version part"
+ )
+ ]
+ ),
+ )
+
+
def _dctrl_ma_field_validation(
_known_field: "F",
_kvpair: Deb822KeyValuePairElement,
_field_range: "TERange",
stanza: Deb822ParagraphElement,
stanza_position: "TEPosition",
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
+ lint_state: LintState,
) -> Iterable[Diagnostic]:
ma_kvpair = stanza.get_kvpair_element("Multi-Arch", use_get=True)
arch = stanza.get("Architecture", "any")
if arch == "all" and ma_kvpair is not None:
- ma_value, ma_value_range = _extract_first_value_and_position(
+ ma_value, ma_value_range = extract_first_value_and_position(
ma_kvpair,
stanza_position,
- position_codec,
- lines,
+ lint_state,
)
if ma_value == "same":
yield Diagnostic(
@@ -448,14 +507,13 @@ def _udeb_only_field_validation(
field_range_te: "TERange",
stanza: Deb822ParagraphElement,
_stanza_position: "TEPosition",
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
+ lint_state: LintState,
) -> Iterable[Diagnostic]:
package_type = stanza.get("Package-Type")
if package_type != "udeb":
field_range_server_units = te_range_to_lsp(field_range_te)
- field_range = position_codec.range_to_client_units(
- lines,
+ field_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
field_range_server_units,
)
yield Diagnostic(
@@ -495,14 +553,13 @@ def _arch_not_all_only_field_validation(
field_range_te: "TERange",
stanza: Deb822ParagraphElement,
_stanza_position: "TEPosition",
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
+ lint_state: LintState,
) -> Iterable[Diagnostic]:
architecture = stanza.get("Architecture")
if architecture == "all":
field_range_server_units = te_range_to_lsp(field_range_te)
- field_range = position_codec.range_to_client_units(
- lines,
+ field_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
field_range_server_units,
)
yield Diagnostic(
@@ -525,8 +582,7 @@ def _each_value_match_regex_validation(
field_range_te: "TERange",
_stanza: Deb822ParagraphElement,
_stanza_position: "TEPosition",
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
+ lint_state: LintState,
) -> Iterable[Diagnostic]:
value_element_pos = kvpair.value_element.position_in_parent().relative_to(
@@ -545,8 +601,8 @@ def _each_value_match_regex_validation(
value_element_pos
)
value_range_server_units = te_range_to_lsp(value_range_te)
- value_range = position_codec.range_to_client_units(
- lines, value_range_server_units
+ value_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines, value_range_server_units
)
yield Diagnostic(
value_range,
@@ -558,6 +614,129 @@ def _each_value_match_regex_validation(
return _validator
+class Dep5Matcher(BasenameGlobMatch):
+ def __init__(self, basename_glob: str) -> None:
+ super().__init__(
+ basename_glob,
+ only_when_in_directory=None,
+ path_type=None,
+ recursive_match=False,
+ )
+
+
+def _match_dep5_segment(
+ current_dir: VirtualPathBase, basename_glob: str
+) -> Iterable[VirtualPathBase]:
+ if "*" in basename_glob or "?" in basename_glob:
+ return Dep5Matcher(basename_glob).finditer(current_dir)
+ else:
+ res = current_dir.get(basename_glob)
+ if res is None:
+ return tuple()
+ return (res,)
+
+
+_RE_SLASHES = re.compile(r"//+")
+
+
+def _dep5_unnecessary_symbols(
+ value: str,
+ value_range: TERange,
+ lint_state: LintState,
+) -> Iterable[Diagnostic]:
+ slash_check_index = 0
+ if value.startswith(("./", "/")):
+ prefix_len = 1 if value[0] == "/" else 2
+ if value[prefix_len - 1 : prefix_len + 2].startswith("//"):
+ _, slashes_end = _RE_SLASHES.search(value).span()
+ prefix_len = slashes_end
+
+ slash_check_index = prefix_len
+ prefix_range = te_range_to_lsp(
+ TERange(
+ value_range.start_pos,
+ TEPosition(
+ value_range.start_pos.line_position,
+ value_range.start_pos.cursor_position + prefix_len,
+ ),
+ )
+ )
+ yield Diagnostic(
+ lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
+ prefix_range,
+ ),
+ f'Unnecessary prefix "{value[0:prefix_len]}"',
+ DiagnosticSeverity.Warning,
+ source="debputy",
+ data=DiagnosticData(
+ quickfixes=[
+ propose_remove_range_quick_fix(
+ proposed_title=f'Delete "{value[0:prefix_len]}"'
+ )
+ ]
+ ),
+ )
+
+ for m in _RE_SLASHES.finditer(value, slash_check_index):
+ m_start, m_end = m.span(0)
+
+ prefix_range = te_range_to_lsp(
+ TERange(
+ TEPosition(
+ value_range.start_pos.line_position,
+ value_range.start_pos.cursor_position + m_start,
+ ),
+ TEPosition(
+ value_range.start_pos.line_position,
+ value_range.start_pos.cursor_position + m_end,
+ ),
+ )
+ )
+ yield Diagnostic(
+ lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
+ prefix_range,
+ ),
+ 'Simplify to a single "/"',
+ DiagnosticSeverity.Warning,
+ source="debputy",
+ data=DiagnosticData(quickfixes=[propose_correct_text_quick_fix("/")]),
+ )
+
+
+def _dep5_files_check(
+ known_field: "F",
+ kvpair: Deb822KeyValuePairElement,
+ field_range_te: "TERange",
+ _stanza: Deb822ParagraphElement,
+ _stanza_position: "TEPosition",
+ lint_state: LintState,
+) -> Iterable[Diagnostic]:
+ interpreter = known_field.field_value_class.interpreter()
+ assert interpreter is not None
+ full_value_range = kvpair.value_element.range_in_parent().relative_to(
+ field_range_te.start_pos
+ )
+ values_with_ranges = []
+ for value_ref in kvpair.interpret_as(interpreter).iter_value_references():
+ value_range = value_ref.locatable.range_in_parent().relative_to(
+ full_value_range.start_pos
+ )
+ value = value_ref.value
+ values_with_ranges.append((value_ref.value, value_range))
+ yield from _dep5_unnecessary_symbols(value, value_range, lint_state)
+
+ source_root = lint_state.source_root
+ if source_root is None:
+ return
+ i = 0
+ limit = len(values_with_ranges)
+ while i < limit:
+ value, value_range = values_with_ranges[i]
+ i += 1
+
+
def _combined_custom_field_check(*checks: CustomFieldCheck) -> CustomFieldCheck:
def _validator(
known_field: "F",
@@ -565,8 +744,7 @@ def _combined_custom_field_check(*checks: CustomFieldCheck) -> CustomFieldCheck:
field_range_te: "TERange",
stanza: Deb822ParagraphElement,
stanza_position: "TEPosition",
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
+ lint_state: LintState,
) -> Iterable[Diagnostic]:
for check in checks:
yield from check(
@@ -575,8 +753,7 @@ def _combined_custom_field_check(*checks: CustomFieldCheck) -> CustomFieldCheck:
field_range_te,
stanza,
stanza_position,
- position_codec,
- lines,
+ lint_state,
)
return _validator
@@ -1039,8 +1216,7 @@ class Deb822KnownField:
stanza: Deb822ParagraphElement,
stanza_position: "TEPosition",
kvpair_position: "TEPosition",
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
+ lint_state: LintState,
*,
field_name_typo_reported: bool = False,
) -> Iterable[Diagnostic]:
@@ -1052,8 +1228,7 @@ class Deb822KnownField:
field_name_token,
field_range_te,
field_name_typo_reported,
- position_codec,
- lines,
+ lint_state,
)
if self.custom_field_check is not None:
yield from self.custom_field_check(
@@ -1062,15 +1237,13 @@ class Deb822KnownField:
field_range_te,
stanza,
stanza_position,
- position_codec,
- lines,
+ lint_state,
)
if not self.spellcheck_value:
yield from self._known_value_diagnostics(
kvpair,
kvpair_position,
- position_codec,
- lines,
+ lint_state,
)
def _diagnostics_for_field_name(
@@ -1078,15 +1251,14 @@ class Deb822KnownField:
token: Deb822FieldNameToken,
token_range: "TERange",
typo_detected: bool,
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
+ lint_state: LintState,
) -> Iterable[Diagnostic]:
field_name = token.text
# Defeat the case-insensitivity from python-debian
field_name_cased = str(field_name)
token_range_server_units = te_range_to_lsp(token_range)
- token_range = position_codec.range_to_client_units(
- lines,
+ token_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
token_range_server_units,
)
if self.deprecated_with_no_replacement:
@@ -1125,8 +1297,7 @@ class Deb822KnownField:
self,
kvpair: Deb822KeyValuePairElement,
kvpair_position: "TEPosition",
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
+ lint_state: LintState,
) -> Iterable[Diagnostic]:
unknown_value_severity = self.unknown_value_diagnostic_severity
interpreter = self.field_value_class.interpreter()
@@ -1146,8 +1317,8 @@ class Deb822KnownField:
continue
if last_token_non_ws_sep_token is not None:
sep_range_te = token.range_in_parent().relative_to(value_off)
- value_range = position_codec.range_to_client_units(
- lines,
+ value_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
te_range_to_lsp(sep_range_te),
)
yield Diagnostic(
@@ -1176,8 +1347,8 @@ class Deb822KnownField:
value_loc = value_ref.locatable
range_position_te = value_loc.range_in_parent().relative_to(value_off)
value_range_in_server_units = te_range_to_lsp(range_position_te)
- value_range = position_codec.range_to_client_units(
- lines,
+ value_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
value_range_in_server_units,
)
yield Diagnostic(
@@ -1194,8 +1365,8 @@ class Deb822KnownField:
value_loc = first_exclusive_value_ref.locatable
value_range_te = value_loc.range_in_parent().relative_to(value_off)
value_range_in_server_units = te_range_to_lsp(value_range_te)
- value_range = position_codec.range_to_client_units(
- lines,
+ value_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
value_range_in_server_units,
)
yield Diagnostic(
@@ -1272,8 +1443,8 @@ class Deb822KnownField:
value_loc = value_ref.locatable
value_range_te = value_loc.range_in_parent().relative_to(value_off)
value_range_in_server_units = te_range_to_lsp(value_range_te)
- value_range = position_codec.range_to_client_units(
- lines,
+ value_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
value_range_in_server_units,
)
yield from (Diagnostic(value_range, **issue_data) for issue_data in issues)
@@ -1473,6 +1644,7 @@ SOURCE_FIELDS = _fields(
"Standards-Version",
FieldValueClass.SINGLE_VALUE,
missing_field_severity=DiagnosticSeverity.Error,
+ custom_field_check=_sv_field_validation,
synopsis_doc="Debian Policy version this package complies with",
hover_text=textwrap.dedent(
"""\
@@ -2328,7 +2500,7 @@ BINARY_FIELDS = _fields(
**Example**:
```
Package: foo
- Provide: debputy-plugin-foo
+ Provides: debputy-plugin-foo
Enhances: debputy
```
"""
@@ -3129,6 +3301,7 @@ _DEP5_FILES_FIELDS = _fields(
"Files",
FieldValueClass.DEP5_FILE_LIST,
is_stanza_name=True,
+ custom_field_check=_dep5_files_check,
missing_field_severity=DiagnosticSeverity.Error,
hover_text=textwrap.dedent(
"""\
diff --git a/src/debputy/lsp/lsp_debian_copyright.py b/src/debputy/lsp/lsp_debian_copyright.py
index 843627e..5895669 100644
--- a/src/debputy/lsp/lsp_debian_copyright.py
+++ b/src/debputy/lsp/lsp_debian_copyright.py
@@ -142,8 +142,7 @@ def _diagnostics_for_paragraph(
other_known_fields: Mapping[str, Deb822KnownField],
is_files_or_license_paragraph: bool,
doc_reference: str,
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
+ lint_state: LintState,
diagnostics: List[Diagnostic],
) -> None:
representation_field = _paragraph_representation_field(stanza)
@@ -155,8 +154,8 @@ def _diagnostics_for_paragraph(
representation_field_pos, representation_field.size()
)
)
- representation_field_range = position_codec.range_to_client_units(
- lines,
+ representation_field_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
representation_field_range_server_units,
)
for known_field in known_fields.values():
@@ -188,8 +187,8 @@ def _diagnostics_for_paragraph(
)
field_position_te = field_range_te.start_pos
field_range_server_units = te_range_to_lsp(field_range_te)
- field_range = position_codec.range_to_client_units(
- lines,
+ field_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
field_range_server_units,
)
field_name_typo_detected = False
@@ -214,8 +213,8 @@ def _diagnostics_for_paragraph(
field_position_te, kvpair.field_token.size()
)
)
- field_range = position_codec.range_to_client_units(
- lines,
+ field_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
token_range_server_units,
)
field_name_typo_detected = True
@@ -266,8 +265,7 @@ def _diagnostics_for_paragraph(
stanza,
stanza_position,
kvpair_position,
- position_codec,
- lines,
+ lint_state,
field_name_typo_reported=field_name_typo_detected,
)
)
@@ -296,8 +294,8 @@ def _diagnostics_for_paragraph(
word_range_server_units = te_range_to_lsp(
TERange.from_position_and_size(word_pos_te, word_range_te)
)
- word_range = position_codec.range_to_client_units(
- lines,
+ word_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
word_range_server_units,
)
diagnostics.append(
@@ -307,10 +305,11 @@ def _diagnostics_for_paragraph(
severity=DiagnosticSeverity.Hint,
source="debputy",
data=DiagnosticData(
+ lint_severity="spelling",
quickfixes=[
propose_correct_text_quick_fix(c)
for c in corrections
- ]
+ ],
),
)
)
@@ -418,9 +417,10 @@ def _scan_for_syntax_errors_and_token_level_diagnostics(
severity=DiagnosticSeverity.Hint,
source="debputy",
data=DiagnosticData(
+ lint_severity="spelling",
quickfixes=[
propose_correct_text_quick_fix(c) for c in corrections
- ]
+ ],
),
)
)
@@ -470,8 +470,7 @@ def _lint_debian_copyright(
other_known_fields,
is_files_or_license_paragraph,
doc_reference,
- position_codec,
- lines,
+ lint_state,
diagnostics,
)
if not is_dep5:
diff --git a/src/debputy/lsp/lsp_debian_patches_series.py b/src/debputy/lsp/lsp_debian_patches_series.py
new file mode 100644
index 0000000..c703e37
--- /dev/null
+++ b/src/debputy/lsp/lsp_debian_patches_series.py
@@ -0,0 +1,454 @@
+import itertools
+import re
+from typing import (
+ Union,
+ Sequence,
+ Optional,
+ Iterable,
+ List,
+ Mapping,
+)
+
+from debputy.filesystem_scan import VirtualPathBase
+from debputy.linting.lint_util import LintState
+from debputy.lsp.debputy_ls import DebputyLanguageServer
+from debputy.lsp.diagnostics import DiagnosticData
+from debputy.lsp.lsp_features import (
+ lint_diagnostics,
+ lsp_standard_handler,
+ lsp_completer,
+ lsp_semantic_tokens_full,
+ SEMANTIC_TOKEN_TYPES_IDS,
+)
+from debputy.lsp.quickfixes import (
+ propose_remove_range_quick_fix,
+ propose_correct_text_quick_fix,
+)
+from debputy.lsp.text_util import (
+ SemanticTokensState,
+)
+from lsprotocol.types import (
+ CompletionItem,
+ Diagnostic,
+ CompletionList,
+ CompletionParams,
+ TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL,
+ SemanticTokensParams,
+ SemanticTokens,
+ SemanticTokenTypes,
+ Position,
+ Range,
+ DiagnosticSeverity,
+ CompletionItemKind,
+ CompletionItemLabelDetails,
+)
+
+try:
+ from debputy.lsp.vendoring._deb822_repro.locatable import (
+ Position as TEPosition,
+ Range as TERange,
+ START_POSITION,
+ )
+
+ from pygls.server import LanguageServer
+ from pygls.workspace import TextDocument
+except ImportError:
+ pass
+
+
+_LANGUAGE_IDS = [
+ "debian/patches/series",
+ # quilt path name
+ "patches/series",
+]
+
+
+def _as_hook_targets(command_name: str) -> Iterable[str]:
+ for prefix, suffix in itertools.product(
+ ["override_", "execute_before_", "execute_after_"],
+ ["", "-arch", "-indep"],
+ ):
+ yield f"{prefix}{command_name}{suffix}"
+
+
+# lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_CODE_ACTION)
+lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL)
+
+_RE_LINE_COMMENT = re.compile(r"^\s*(#(?:.*\S)?)\s*$")
+_RE_PATCH_LINE = re.compile(
+ r"""
+ ^ \s* (?P<patch_name> \S+ ) \s*
+ (?: (?P<options> [^#\s]+ ) \s* )?
+ (?: (?P<comment> \# (?:.*\S)? ) \s* )?
+""",
+ re.VERBOSE,
+)
+_RE_UNNECESSARY_LEADING_PREFIX = re.compile(r"(?:(?:[.]{1,2})?/+)+")
+_RE_UNNECESSARY_SLASHES = re.compile("//+")
+
+
+def is_valid_file(path: str) -> bool:
+ return path.endswith("/patches/series")
+
+
+def _all_patch_files(
+ debian_patches: VirtualPathBase,
+) -> Iterable[VirtualPathBase]:
+ if not debian_patches.is_dir:
+ return
+
+ for patch_file in debian_patches.all_paths():
+ if patch_file.is_dir or patch_file.path in (
+ "debian/patches/series",
+ "./debian/patches/series",
+ ):
+ continue
+
+ if patch_file.name.endswith("~"):
+ continue
+ if patch_file.name.startswith((".#", "#")):
+ continue
+ parent = patch_file.parent_dir
+ if (
+ parent is not None
+ and parent.path in ("debian/patches", "./debian/patches")
+ and patch_file.name.endswith(".series")
+ ):
+ continue
+ yield patch_file
+
+
+def _listed_patches(
+ lines: List[str],
+) -> Iterable[str]:
+ for line in lines:
+ m = _RE_PATCH_LINE.match(line)
+ if m is None:
+ continue
+ filename = m.group(1)
+ if filename.startswith("#"):
+ continue
+ filename = _RE_UNNECESSARY_LEADING_PREFIX.sub("", filename, count=1)
+ filename = _RE_UNNECESSARY_SLASHES.sub("/", filename)
+ if not filename:
+ continue
+ yield filename
+
+
+@lint_diagnostics(_LANGUAGE_IDS)
+def _lint_debian_patches_series(lint_state: LintState) -> Optional[List[Diagnostic]]:
+ if not is_valid_file(lint_state.path):
+ return None
+
+ source_root = lint_state.source_root
+ if source_root is None:
+ return None
+
+ dpatches = source_root.lookup("debian/patches/")
+ if dpatches is None or not dpatches.is_dir:
+ return None
+
+ position_codec = lint_state.position_codec
+ diagnostics = []
+ used_patches = set()
+ all_patches = {pf.path for pf in _all_patch_files(dpatches)}
+
+ for line_no, line in enumerate(lint_state.lines):
+ m = _RE_PATCH_LINE.match(line)
+ if not m:
+ continue
+ groups = m.groupdict()
+ orig_filename = groups["patch_name"]
+ filename = orig_filename
+ patch_start_col, patch_end_col = m.span("patch_name")
+ orig_filename_start_col = patch_start_col
+ if filename.startswith("#"):
+ continue
+ if filename.startswith(("../", "./", "/")):
+ sm = _RE_UNNECESSARY_LEADING_PREFIX.match(filename)
+ assert sm is not None
+ slash_start, slash_end = sm.span(0)
+ orig_filename_start_col = slash_end
+ prefix = filename[:orig_filename_start_col]
+ filename = filename[orig_filename_start_col:]
+ slash_range = position_codec.range_to_client_units(
+ lint_state.lines,
+ Range(
+ Position(
+ line_no,
+ patch_start_col + slash_start,
+ ),
+ Position(
+ line_no,
+ patch_start_col + slash_end,
+ ),
+ ),
+ )
+ skip_use_check = False
+ if ".." in prefix:
+ diagnostic_title = f'Disallowed prefix "{prefix}"'
+ severity = DiagnosticSeverity.Error
+ skip_use_check = True
+ else:
+ diagnostic_title = f'Unnecessary prefix "{prefix}"'
+ severity = DiagnosticSeverity.Warning
+ diagnostics.append(
+ Diagnostic(
+ slash_range,
+ diagnostic_title,
+ source="debputy",
+ severity=severity,
+ data=DiagnosticData(
+ quickfixes=[
+ propose_remove_range_quick_fix(
+ proposed_title=f'Remove prefix "{prefix}"'
+ )
+ ]
+ ),
+ )
+ )
+ if skip_use_check:
+ continue
+ if "//" in filename:
+ for usm in _RE_UNNECESSARY_SLASHES.finditer(filename):
+ start_col, end_cold = usm.span()
+ slash_range = position_codec.range_to_client_units(
+ lint_state.lines,
+ Range(
+ Position(
+ line_no,
+ orig_filename_start_col + start_col,
+ ),
+ Position(
+ line_no,
+ orig_filename_start_col + end_cold,
+ ),
+ ),
+ )
+ diagnostics.append(
+ Diagnostic(
+ slash_range,
+ "Unnecessary slashes",
+ source="debputy",
+ severity=DiagnosticSeverity.Warning,
+ data=DiagnosticData(
+ quickfixes=[propose_correct_text_quick_fix("/")]
+ ),
+ )
+ )
+ filename = _RE_UNNECESSARY_SLASHES.sub("/", filename)
+
+ patch_name_range = position_codec.range_to_client_units(
+ lint_state.lines,
+ Range(
+ Position(
+ line_no,
+ patch_start_col,
+ ),
+ Position(
+ line_no,
+ patch_end_col,
+ ),
+ ),
+ )
+ if not filename.lower().endswith((".diff", ".patch")):
+ diagnostics.append(
+ Diagnostic(
+ patch_name_range,
+ f'Patch not using ".patch" or ".diff" as extension: "{filename}"',
+ source="debputy",
+ severity=DiagnosticSeverity.Hint,
+ data=DiagnosticData(
+ quickfixes=[propose_correct_text_quick_fix(f"{filename}.patch")]
+ ),
+ )
+ )
+ patch_path = f"{dpatches.path}/{filename}"
+ if patch_path not in all_patches:
+ diagnostics.append(
+ Diagnostic(
+ patch_name_range,
+ f'Non-existing patch "{filename}"',
+ source="debputy",
+ severity=DiagnosticSeverity.Error,
+ )
+ )
+ elif patch_path in used_patches:
+ diagnostics.append(
+ Diagnostic(
+ patch_name_range,
+ f'Duplicate patch: "{filename}"',
+ source="debputy",
+ severity=DiagnosticSeverity.Error,
+ )
+ )
+ else:
+ used_patches.add(patch_path)
+
+ unused_patches = all_patches - used_patches
+ for unused_patch in sorted(unused_patches):
+ patch_name = unused_patch[len(dpatches.path) + 1 :]
+ line_count = len(lint_state.lines)
+ file_range = Range(
+ Position(
+ 0,
+ 0,
+ ),
+ Position(
+ line_count,
+ len(lint_state.lines[-1]) if line_count else 0,
+ ),
+ )
+ diagnostics.append(
+ Diagnostic(
+ file_range,
+ f'Unused patch: "{patch_name}"',
+ source="debputy",
+ severity=DiagnosticSeverity.Warning,
+ )
+ )
+
+ return diagnostics
+
+
+@lsp_completer(_LANGUAGE_IDS)
+def _debian_patches_series_completions(
+ ls: "DebputyLanguageServer",
+ params: CompletionParams,
+) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]:
+ doc = ls.workspace.get_text_document(params.text_document.uri)
+ if not is_valid_file(doc.path):
+ return None
+ lint_state = ls.lint_state(doc)
+ source_root = lint_state.source_root
+ dpatches = source_root.lookup("debian/patches") if source_root is not None else None
+ if dpatches is None:
+ return None
+ lines = doc.lines
+ position = doc.position_codec.position_from_client_units(lines, params.position)
+ line = lines[position.line]
+ if line.startswith("#"):
+ return None
+ try:
+ line.rindex(" #", 0, position.character)
+ return None # In an end of line comment
+ except ValueError:
+ pass
+ already_used = set(_listed_patches(lines))
+ # `debian/patches + "/"`
+ dpatches_dir_len = len(dpatches.path) + 1
+ all_patch_files_gen = (
+ p.path[dpatches_dir_len:] for p in _all_patch_files(dpatches)
+ )
+ return [
+ CompletionItem(
+ p,
+ kind=CompletionItemKind.File,
+ insert_text=f"{p}\n",
+ label_details=CompletionItemLabelDetails(
+ description=f"debian/patches/{p}",
+ ),
+ )
+ for p in all_patch_files_gen
+ if p not in already_used
+ ]
+
+
+@lsp_semantic_tokens_full(_LANGUAGE_IDS)
+def _debian_patches_semantic_tokens_full(
+ ls: "DebputyLanguageServer",
+ request: SemanticTokensParams,
+) -> Optional[SemanticTokens]:
+ doc = ls.workspace.get_text_document(request.text_document.uri)
+ if not is_valid_file(doc.path):
+ return None
+ lines = doc.lines
+ position_codec = doc.position_codec
+
+ tokens: List[int] = []
+ string_token_code = SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.String.value]
+ comment_token_code = SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.Comment.value]
+ options_token_code = SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.Keyword.value]
+ sem_token_state = SemanticTokensState(
+ ls,
+ doc,
+ lines,
+ tokens,
+ )
+
+ for line_no, line in enumerate(lines):
+ if line.isspace():
+ continue
+ m = _RE_LINE_COMMENT.match(line)
+ if m:
+ start_col, end_col = m.span(1)
+ start_pos = position_codec.position_to_client_units(
+ sem_token_state.lines,
+ Position(
+ line_no,
+ start_col,
+ ),
+ )
+ sem_token_state.emit_token(
+ start_pos,
+ position_codec.client_num_units(line[start_col:end_col]),
+ comment_token_code,
+ )
+ continue
+ m = _RE_PATCH_LINE.match(line)
+ if not m:
+ continue
+ groups = m.groupdict()
+ _emit_group(
+ line_no,
+ string_token_code,
+ sem_token_state,
+ "patch_name",
+ groups,
+ m,
+ )
+ _emit_group(
+ line_no,
+ options_token_code,
+ sem_token_state,
+ "options",
+ groups,
+ m,
+ )
+ _emit_group(
+ line_no,
+ comment_token_code,
+ sem_token_state,
+ "comment",
+ groups,
+ m,
+ )
+
+ return SemanticTokens(tokens)
+
+
+def _emit_group(
+ line_no: int,
+ token_code: int,
+ sem_token_state: SemanticTokensState,
+ group_name: str,
+ groups: Mapping[str, str],
+ match: re.Match,
+) -> None:
+ value = groups.get(group_name)
+ if not value:
+ return None
+ patch_start_col, patch_end_col = match.span(group_name)
+ position_codec = sem_token_state.doc.position_codec
+ patch_start_pos = position_codec.position_to_client_units(
+ sem_token_state.lines,
+ Position(
+ line_no,
+ patch_start_col,
+ ),
+ )
+ sem_token_state.emit_token(
+ patch_start_pos,
+ position_codec.client_num_units(value),
+ token_code,
+ )
diff --git a/src/debputy/lsp/lsp_debian_tests_control.py b/src/debputy/lsp/lsp_debian_tests_control.py
index 20a198c..3d418cb 100644
--- a/src/debputy/lsp/lsp_debian_tests_control.py
+++ b/src/debputy/lsp/lsp_debian_tests_control.py
@@ -138,8 +138,7 @@ def _diagnostics_for_paragraph(
stanza_position: "TEPosition",
known_fields: Mapping[str, Deb822KnownField],
doc_reference: str,
- position_codec: "LintCapablePositionCodec",
- lines: List[str],
+ lint_state: LintState,
diagnostics: List[Diagnostic],
) -> None:
representation_field = _paragraph_representation_field(stanza)
@@ -151,8 +150,8 @@ def _diagnostics_for_paragraph(
representation_field_pos, representation_field.size()
)
)
- representation_field_range = position_codec.range_to_client_units(
- lines,
+ representation_field_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
representation_field_range_server_units,
)
for known_field in known_fields.values():
@@ -203,8 +202,8 @@ def _diagnostics_for_paragraph(
)
field_position_te = field_range_te.start_pos
field_range_server_units = te_range_to_lsp(field_range_te)
- field_range = position_codec.range_to_client_units(
- lines,
+ field_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
field_range_server_units,
)
field_name_typo_detected = False
@@ -229,8 +228,8 @@ def _diagnostics_for_paragraph(
field_position_te, kvpair.field_token.size()
)
)
- field_range = position_codec.range_to_client_units(
- lines,
+ field_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
token_range_server_units,
)
field_name_typo_detected = True
@@ -266,8 +265,7 @@ def _diagnostics_for_paragraph(
stanza,
stanza_position,
kvpair_position,
- position_codec,
- lines,
+ lint_state,
field_name_typo_reported=field_name_typo_detected,
)
)
@@ -296,8 +294,8 @@ def _diagnostics_for_paragraph(
word_range_server_units = te_range_to_lsp(
TERange.from_position_and_size(word_pos_te, word_range)
)
- word_range = position_codec.range_to_client_units(
- lines,
+ word_range = lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
word_range_server_units,
)
diagnostics.append(
@@ -307,10 +305,11 @@ def _diagnostics_for_paragraph(
severity=DiagnosticSeverity.Hint,
source="debputy",
data=DiagnosticData(
+ lint_severity="spelling",
quickfixes=[
propose_correct_text_quick_fix(c)
for c in corrections
- ]
+ ],
),
)
)
@@ -417,9 +416,10 @@ def _scan_for_syntax_errors_and_token_level_diagnostics(
severity=DiagnosticSeverity.Hint,
source="debputy",
data=DiagnosticData(
+ lint_severity="spelling",
quickfixes=[
propose_correct_text_quick_fix(c) for c in corrections
- ]
+ ],
),
)
)
@@ -455,8 +455,7 @@ def _lint_debian_tests_control(
paragraph_pos,
known_fields,
doc_reference,
- position_codec,
- lines,
+ lint_state,
diagnostics,
)
return diagnostics
diff --git a/src/debputy/lsp/lsp_features.py b/src/debputy/lsp/lsp_features.py
index 63e4cd2..41313f3 100644
--- a/src/debputy/lsp/lsp_features.py
+++ b/src/debputy/lsp/lsp_features.py
@@ -20,6 +20,7 @@ from lsprotocol.types import (
DidOpenTextDocumentParams,
SemanticTokensLegend,
TEXT_DOCUMENT_FORMATTING,
+ SemanticTokenTypes,
)
from debputy.commands.debputy_cmd.context import CommandContext
@@ -39,7 +40,12 @@ from debputy.lsp.text_util import on_save_trim_end_of_line_whitespace
C = TypeVar("C", bound=Callable)
SEMANTIC_TOKENS_LEGEND = SemanticTokensLegend(
- token_types=["keyword", "enumMember", "comment"],
+ token_types=[
+ SemanticTokenTypes.Keyword.value,
+ SemanticTokenTypes.EnumMember.value,
+ SemanticTokenTypes.Comment.value,
+ SemanticTokenTypes.String.value,
+ ],
token_modifiers=[],
)
SEMANTIC_TOKEN_TYPES_IDS = {
diff --git a/src/debputy/lsp/lsp_generic_deb822.py b/src/debputy/lsp/lsp_generic_deb822.py
index 5b1a22a..c8b476a 100644
--- a/src/debputy/lsp/lsp_generic_deb822.py
+++ b/src/debputy/lsp/lsp_generic_deb822.py
@@ -31,6 +31,7 @@ from lsprotocol.types import (
SemanticTokens,
TextEdit,
MessageType,
+ SemanticTokenTypes,
)
from debputy.linting.lint_util import LintState
@@ -46,6 +47,7 @@ from debputy.lsp.lsp_features import SEMANTIC_TOKEN_TYPES_IDS
from debputy.lsp.text_util import (
te_position_to_lsp,
trim_end_of_line_whitespace,
+ SemanticTokensState,
)
from debputy.lsp.vendoring._deb822_repro.locatable import (
START_POSITION,
@@ -446,47 +448,35 @@ def deb822_folding_ranges(
return folding_ranges
-@dataclasses.dataclass(slots=True)
-class SemanticTokenState:
- ls: "DebputyLanguageServer"
- file_metadata: Deb822FileMetadata[Any]
- doc: "TextDocument"
- lines: List[str]
- tokens: List[int]
- keyword_token_code: int
- known_value_token_code: int
- comment_token_code: int
- _previous_line: int = 0
- _previous_col: int = 0
-
- def emit_token(
- self,
- start_pos: Position,
- len_client_units: int,
- token_code: int,
- *,
- token_modifiers: int = 0,
- ) -> None:
- line_delta = start_pos.line - self._previous_line
- self._previous_line = start_pos.line
- previous_col = self._previous_col
+class Deb822SemanticTokensState(SemanticTokensState):
- if line_delta:
- previous_col = 0
-
- column_delta = start_pos.character - previous_col
- self._previous_col = start_pos.character
+ __slots__ = (
+ "file_metadata",
+ "keyword_token_code",
+ "known_value_token_code",
+ "comment_token_code",
+ )
- tokens = self.tokens
- tokens.append(line_delta) # Line delta
- tokens.append(column_delta) # Token column delta
- tokens.append(len_client_units) # Token length
- tokens.append(token_code)
- tokens.append(token_modifiers)
+ def __init__(
+ self,
+ ls: "DebputyLanguageServer",
+ doc: "TextDocument",
+ lines: List[str],
+ tokens: List[int],
+ file_metadata: Deb822FileMetadata[Any],
+ keyword_token_code: int,
+ known_value_token_code: int,
+ comment_token_code: int,
+ ) -> None:
+ super().__init__(ls, doc, lines, tokens)
+ self.file_metadata = file_metadata
+ self.keyword_token_code = keyword_token_code
+ self.known_value_token_code = known_value_token_code
+ self.comment_token_code = comment_token_code
def _deb822_paragraph_semantic_tokens_full(
- sem_token_state: SemanticTokenState,
+ sem_token_state: Deb822SemanticTokensState,
stanza: Deb822ParagraphElement,
stanza_idx: int,
) -> None:
@@ -612,15 +602,15 @@ def deb822_semantic_tokens_full(
return None
tokens: List[int] = []
- comment_token_code = SEMANTIC_TOKEN_TYPES_IDS["comment"]
- sem_token_state = SemanticTokenState(
+ comment_token_code = SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.Comment.value]
+ sem_token_state = Deb822SemanticTokensState(
ls,
- file_metadata,
doc,
lines,
tokens,
- SEMANTIC_TOKEN_TYPES_IDS["keyword"],
- SEMANTIC_TOKEN_TYPES_IDS["enumMember"],
+ file_metadata,
+ SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.Keyword],
+ SEMANTIC_TOKEN_TYPES_IDS[SemanticTokenTypes.EnumMember],
comment_token_code,
)
diff --git a/src/debputy/lsp/quickfixes.py b/src/debputy/lsp/quickfixes.py
index a9fcf7b..8787d9f 100644
--- a/src/debputy/lsp/quickfixes.py
+++ b/src/debputy/lsp/quickfixes.py
@@ -10,6 +10,7 @@ from typing import (
Optional,
List,
cast,
+ NotRequired,
)
from lsprotocol.types import (
@@ -44,6 +45,7 @@ except ImportError:
CodeActionName = Literal[
"correct-text",
"remove-line",
+ "remove-range",
"insert-text-on-line-after-diagnostic",
]
@@ -62,6 +64,11 @@ class RemoveLineCodeAction(TypedDict):
code_action: Literal["remove-line"]
+class RemoveRangeCodeAction(TypedDict):
+ code_action: Literal["remove-range"]
+ proposed_title: NotRequired[str]
+
+
def propose_correct_text_quick_fix(correct_value: str) -> CorrectTextCodeAction:
return {
"code_action": "correct-text",
@@ -84,6 +91,17 @@ def propose_remove_line_quick_fix() -> RemoveLineCodeAction:
}
+def propose_remove_range_quick_fix(
+ *, proposed_title: Optional[str]
+) -> RemoveRangeCodeAction:
+ r: RemoveRangeCodeAction = {
+ "code_action": "remove-range",
+ }
+ if proposed_title:
+ r["proposed_title"] = proposed_title
+ return r
+
+
CODE_ACTION_HANDLERS: Dict[
CodeActionName,
Callable[
@@ -230,6 +248,35 @@ def _remove_line_code_action(
)
+@_code_handler_for("remove-range")
+def _remove_range_code_action(
+ code_action_data: RemoveRangeCodeAction,
+ code_action_params: CodeActionParams,
+ diagnostic: Diagnostic,
+) -> Iterable[Union[CodeAction, Command]]:
+ edit = TextEdit(
+ diagnostic.range,
+ "",
+ )
+ title = code_action_data.get("proposed_title", "Delete")
+ yield CodeAction(
+ title=title,
+ kind=CodeActionKind.QuickFix,
+ diagnostics=[diagnostic],
+ edit=WorkspaceEdit(
+ changes={code_action_params.text_document.uri: [edit]},
+ document_changes=[
+ TextDocumentEdit(
+ text_document=OptionalVersionedTextDocumentIdentifier(
+ uri=code_action_params.text_document.uri,
+ ),
+ edits=[edit],
+ )
+ ],
+ ),
+ )
+
+
def provide_standard_quickfixes_from_diagnostics(
code_action_params: CodeActionParams,
) -> Optional[List[Union[Command, CodeAction]]]:
diff --git a/src/debputy/lsp/spellchecking.py b/src/debputy/lsp/spellchecking.py
index b767802..4cf71f2 100644
--- a/src/debputy/lsp/spellchecking.py
+++ b/src/debputy/lsp/spellchecking.py
@@ -160,7 +160,8 @@ def spellcheck_line(
severity=DiagnosticSeverity.Hint,
source="debputy",
data=DiagnosticData(
- quickfixes=[propose_correct_text_quick_fix(c) for c in corrections]
+ lint_severity="spelling",
+ quickfixes=[propose_correct_text_quick_fix(c) for c in corrections],
),
)
diff --git a/src/debputy/lsp/style_prefs.py b/src/debputy/lsp/style_prefs.py
index 755e67c..1bcd800 100644
--- a/src/debputy/lsp/style_prefs.py
+++ b/src/debputy/lsp/style_prefs.py
@@ -627,7 +627,9 @@ def determine_effective_style(
return maint_style.as_effective_pref(), None
uploaders = source_package.fields.get("Uploaders")
if uploaders is None:
- detected_style = maint_style.as_effective_pref() if maint_style is not None else None
+ detected_style = (
+ maint_style.as_effective_pref() if maint_style is not None else None
+ )
return detected_style, None
all_styles: List[Optional[EffectivePreference]] = []
if maint_style is not None:
diff --git a/src/debputy/lsp/text_util.py b/src/debputy/lsp/text_util.py
index e58990f..dd87571 100644
--- a/src/debputy/lsp/text_util.py
+++ b/src/debputy/lsp/text_util.py
@@ -15,6 +15,7 @@ try:
Position as TEPosition,
Range as TERange,
)
+ from debputy.lsp.debputy_ls import DebputyLanguageServer
except ImportError:
pass
@@ -138,3 +139,46 @@ def te_range_to_lsp(te_range: "TERange") -> Range:
te_position_to_lsp(te_range.start_pos),
te_position_to_lsp(te_range.end_pos),
)
+
+
+class SemanticTokensState:
+ __slots__ = ("ls", "doc", "lines", "tokens", "_previous_line", "_previous_col")
+
+ def __init__(
+ self,
+ ls: "DebputyLanguageServer",
+ doc: "TextDocument",
+ lines: List[str],
+ tokens: List[int],
+ ) -> None:
+ self.ls = ls
+ self.doc = doc
+ self.lines = lines
+ self.tokens = tokens
+ self._previous_line = 0
+ self._previous_col = 0
+
+ def emit_token(
+ self,
+ start_pos: Position,
+ len_client_units: int,
+ token_code: int,
+ *,
+ token_modifiers: int = 0,
+ ) -> None:
+ line_delta = start_pos.line - self._previous_line
+ self._previous_line = start_pos.line
+ previous_col = self._previous_col
+
+ if line_delta:
+ previous_col = 0
+
+ column_delta = start_pos.character - previous_col
+ self._previous_col = start_pos.character
+
+ tokens = self.tokens
+ tokens.append(line_delta) # Line delta
+ tokens.append(column_delta) # Token column delta
+ tokens.append(len_client_units) # Token length
+ tokens.append(token_code)
+ tokens.append(token_modifiers)