-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathrequired_refs.py
More file actions
157 lines (133 loc) · 5.6 KB
/
required_refs.py
File metadata and controls
157 lines (133 loc) · 5.6 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
"""Required standards-references check (Q2 resolution: start permissive).
For each repo, look up the per-type requirements from
``standards/required-refs.json``. For each ``(file, required_ref)`` pair:
* file missing in the repo -> ``error``
* file present but lacks a link to the required standards doc -> ``error``
* otherwise: silent
Today the requirements block is empty (zero tool repos link to
``standards/*.md``), so this check is silent in practice. The plumbing is
ready for the moment that changes — at that point, add entries to
``required-refs.json`` and the check immediately starts enforcing them
without any code change.
The loader is in this module rather than ``config.py`` to keep the data
colocated with its consumer; it is only used here.
"""
from __future__ import annotations
import json
import re
from pathlib import Path
from typing import Iterable, List, Mapping, Sequence
from ..types import Finding, RepoSnapshot
# Check identifier: used as the skip-list key, the pragma check name, and
# the ``check`` field on every Finding this module emits.
NAME = "required-refs"
class RequiredRefsError(Exception):
    """Raised when ``standards/required-refs.json`` exists but is malformed.

    Covers both invalid JSON and a structurally wrong schema (non-object
    root, non-object ``requirements``, or non-list ref entries).
    """
def load_required_refs(path: Path | None) -> Mapping[str, Mapping[str, Sequence[str]]]:
"""Load and validate ``standards/required-refs.json``.
Returns a mapping ``{repo_type: {filename: [required_ref, ...]}}``.
Missing file -> empty mapping. Malformed JSON or wrong schema ->
``RequiredRefsError``.
"""
if path is None or not path.is_file():
return {}
try:
data = json.loads(path.read_text(encoding="utf-8"))
except json.JSONDecodeError as exc:
raise RequiredRefsError(f"malformed JSON in {path}: {exc}") from exc
if not isinstance(data, dict):
raise RequiredRefsError(f"{path}: expected object at root")
reqs = data.get("requirements", {})
if not isinstance(reqs, dict):
raise RequiredRefsError(f"{path}: 'requirements' must be an object")
out: dict[str, dict[str, list[str]]] = {}
for repo_type, file_map in reqs.items():
if not isinstance(file_map, dict):
raise RequiredRefsError(
f"{path}: requirements[{repo_type!r}] must be an object"
)
out[repo_type] = {}
for fname, refs in file_map.items():
if not isinstance(refs, list):
raise RequiredRefsError(
f"{path}: requirements[{repo_type!r}][{fname!r}] must be a list"
)
out[repo_type][fname] = [str(r) for r in refs]
return out
def _file_links_to(content: bytes, required_ref: str) -> bool:
"""Return True if ``content`` contains any markdown link whose target
resolves to ``required_ref`` (matched by trailing basename)."""
basename = required_ref.split("/")[-1]
if not basename:
return False
# Match both inline and reference-style link targets that end with
# the required basename (optionally followed by #fragment or whitespace).
pattern = re.compile(
rb"standards/" + re.escape(basename.encode("utf-8")) + rb"(?:#[^\s)]*)?",
)
return pattern.search(content) is not None
class RequiredRefsCheck:
    """Check that required standards documents are present and linked.

    For the snapshot's repo type, each configured ``(file, required_refs)``
    pair yields:

    * one ``error`` finding when the file is missing from the repo,
    * one ``error`` finding per required ref the file does not link to,
    * a single ``info`` finding (and no errors) when the file carries a
      ``drift-ignore`` pragma for this check.

    Silent when the check is in the skip list or the repo type has no
    requirements configured.
    """

    name: str = NAME

    def run(self, snapshot: RepoSnapshot) -> Iterable[Finding]:
        """Evaluate every required-ref rule for *snapshot* and return findings."""
        if NAME in snapshot.config.skip_checks:
            return ()
        requirements = snapshot.meta_required_refs.get(snapshot.repo_type, {})
        if not requirements:
            return ()
        out: List[Finding] = []
        for file_name, required_refs in requirements.items():
            if not required_refs:
                continue
            rel = Path(file_name)
            file = snapshot.files.get(rel)
            pragma = self._ignore_pragma(file)
            if pragma is not None:
                # Pragma wins: a single info record, no errors for this file.
                out.append(self._pragma_finding(snapshot, rel, pragma))
            elif file is None:
                out.append(
                    self._missing_file_finding(snapshot, rel, file_name, required_refs)
                )
            else:
                out.extend(
                    self._missing_ref_findings(
                        snapshot, rel, file_name, file, required_refs
                    )
                )
        return out

    @staticmethod
    def _ignore_pragma(file):
        """Return the file's drift-ignore pragma for this check, if any."""
        if file is None:
            return None
        return next((p for p in file.pragmas if p.check_name == NAME), None)

    @staticmethod
    def _pragma_finding(snapshot: RepoSnapshot, rel: Path, pragma) -> Finding:
        """Info finding recording that a pragma suppressed this check."""
        return Finding(
            repo=snapshot.slug,
            file=rel,
            check=NAME,
            severity="info",
            message=(
                "skipped by drift-ignore pragma"
                + (f" (reason: {pragma.reason})" if pragma.reason else "")
            ),
        )

    @staticmethod
    def _missing_file_finding(
        snapshot: RepoSnapshot,
        rel: Path,
        file_name: str,
        required_refs: Sequence[str],
    ) -> Finding:
        """Error finding for a required file that is absent from the repo."""
        return Finding(
            repo=snapshot.slug,
            file=rel,
            check=NAME,
            severity="error",
            message=(
                f"{file_name} is required for {snapshot.repo_type} "
                f"repos but is not present"
            ),
            suggested_fix=(
                f"create {file_name} and link to "
                f"{', '.join(required_refs)}"
            ),
        )

    @staticmethod
    def _missing_ref_findings(
        snapshot: RepoSnapshot,
        rel: Path,
        file_name: str,
        file,
        required_refs: Sequence[str],
    ) -> List[Finding]:
        """Error findings for each required ref the file fails to link to."""
        return [
            Finding(
                repo=snapshot.slug,
                file=rel,
                check=NAME,
                severity="error",
                message=(
                    f"{file_name} must link to {ref} "
                    f"(required for {snapshot.repo_type})"
                ),
                suggested_fix=f"add a link to {ref} in {file_name}",
            )
            for ref in required_refs
            if not _file_links_to(file.content, ref)
        ]