forked from useblocks/sphinx-needs
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathneedsfile.py
304 lines (253 loc) · 9.89 KB
/
needsfile.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
"""
Cares about the correct handling with ``needs.json`` files.
Creates, checks and imports ``needs.json`` files.
"""
from __future__ import annotations
import json
import os
import sys
from collections.abc import Iterable
from copy import deepcopy
from datetime import datetime
from functools import lru_cache
from typing import Any
from jsonschema import Draft7Validator
from sphinx.config import Config
from sphinx_needs.config import NeedsSphinxConfig
from sphinx_needs.data import NeedsCoreFields, NeedsInfoType
from sphinx_needs.logging import get_logger, log_warning
log = get_logger(__name__)
def generate_needs_schema(
    needs_config: NeedsSphinxConfig, exclude_properties: Iterable[str] = ()
) -> dict[str, Any]:
    """Generate a JSON schema for all fields in each need item.

    It is based on:

    * the core fields defined in NeedsCoreFields
    * the extra options defined dynamically
    * the global options defined dynamically
    * the extra links defined dynamically

    :param needs_config: The sphinx-needs configuration to read the
        dynamic options/links from.
    :param exclude_properties: Field names to drop from the final schema.
    :return: A draft-07 JSON schema (``object`` type) for a single need.
    """
    # start with the dynamically declared extra options (all plain strings)
    properties: dict[str, Any] = {
        option_name: {
            "type": "string",
            "description": params.description,
            "field_type": "extra",
            "default": "",
        }
        for option_name, params in needs_config.extra_options.items()
    }

    # TODO currently extra options can overlap with core fields,
    # in which case they are ignored,
    # (this is the case for `type` added by the github service)
    # hence this is why we add the core options after the extra options
    for field_name, core_params in NeedsCoreFields.items():
        core_schema = deepcopy(core_params["schema"])
        core_schema["description"] = f"{core_params['description']}"
        core_schema["field_type"] = "core"
        properties[field_name] = core_schema

    def _array_field(description: str, field_type: str) -> dict[str, Any]:
        # shared shape for forward-link and backlink list fields
        return {
            "type": "array",
            "items": {"type": "string"},
            "description": description,
            "field_type": field_type,
            "default": [],
        }

    for link in needs_config.extra_links:
        option = link["option"]
        properties[option] = _array_field("Link field", "links")
        properties[option + "_back"] = _array_field("Backlink field", "backlinks")

    # global options only fill gaps; never overwrite an existing field
    for option_name in needs_config.global_options:
        properties.setdefault(
            option_name,
            {
                "type": "string",
                "description": "Added by needs_global_options configuration",
                "field_type": "global",
                "default": "",
            },
        )

    for excluded_name in exclude_properties:
        properties.pop(excluded_name, None)

    return {
        "$schema": "http://json-schema.org/draft-07/schema#",
        "type": "object",
        "properties": properties,
    }
class NeedsList:
    """Collects need items per project version and reads/writes them
    as ``needs.json`` structures."""

    def __init__(
        self,
        config: Config,
        outdir: str | os.PathLike[str],
        confdir: str | os.PathLike[str],
        add_schema: bool = True,
    ) -> None:
        """
        :param config: The Sphinx configuration object.
        :param outdir: Build output directory (default target for ``needs.json``).
        :param confdir: Sphinx configuration directory (base for relative load paths).
        :param add_schema: If True, embed a generated JSON schema in each version.
        """
        self.config = config
        self.needs_config = NeedsSphinxConfig(config)
        self.outdir = outdir
        self.confdir = confdir
        self.current_version = config.version
        self.project = config.project
        self.needs_list: dict[str, Any] = {
            "project": self.project,
            "current_version": self.current_version,
            "versions": {},
        }
        if not self.needs_config.reproducible_json:
            # placeholder only; the real timestamp is written in _finalise()
            self.needs_list["created"] = ""
        self.log = log
        self._exclude_need_keys = set(self.needs_config.json_exclude_fields)
        self._schema = (
            generate_needs_schema(
                self.needs_config, exclude_properties=self._exclude_need_keys
            )
            if add_schema
            else None
        )
        # field name -> default value, used to omit default-valued fields on output
        self._need_defaults = (
            {
                name: value["default"]
                for name, value in self._schema["properties"].items()
                if "default" in value
            }
            if self._schema
            else {}
        )

    def update_or_add_version(self, version: str) -> None:
        """Ensure an entry for ``version`` exists and refresh its metadata."""
        from sphinx_needs import __version__

        if version not in self.needs_list["versions"]:
            self.needs_list["versions"][version] = {
                "needs_amount": 0,
                "needs": {},
                "creator": {
                    "program": "sphinx_needs",
                    "version": __version__,
                },
            }
            if self._schema:
                self.needs_list["versions"][version]["needs_schema"] = self._schema
            if self.needs_config.json_remove_defaults:
                self.needs_list["versions"][version]["needs_defaults_removed"] = True
            if not self.needs_config.reproducible_json:
                self.needs_list["versions"][version]["created"] = ""
        if "needs" not in self.needs_list["versions"][version]:
            self.needs_list["versions"][version]["needs"] = {}
        if not self.needs_config.reproducible_json:
            self.needs_list["versions"][version]["created"] = datetime.now().isoformat()

    def add_need(self, version: str, need_info: NeedsInfoType) -> None:
        """Add a single need to ``version``.

        Fields listed in ``needs_json_exclude_fields`` are dropped; fields
        equal to their schema default are dropped when
        ``needs_json_remove_defaults`` is set.
        """
        self.update_or_add_version(version)
        writable_needs = {
            key: value
            for key, value in need_info.items()
            if key not in self._exclude_need_keys
        }
        if self.needs_config.json_remove_defaults:
            writable_needs = {
                key: value
                for key, value in writable_needs.items()
                if not (
                    key in self._need_defaults and value == self._need_defaults[key]
                )
            }
        self.needs_list["versions"][version]["needs"][need_info["id"]] = writable_needs
        self.needs_list["versions"][version]["needs_amount"] = len(
            self.needs_list["versions"][version]["needs"]
        )

    def wipe_version(self, version: str) -> None:
        """Remove all data stored for ``version`` (no-op if unknown)."""
        if version in self.needs_list["versions"]:
            del self.needs_list["versions"][version]

    def _finalise(self) -> None:
        # We need to rewrite some data, because this kind of data gets
        # overwritten during needs.json import
        if not self.needs_config.reproducible_json:
            self.needs_list["created"] = datetime.now().isoformat()
        else:
            self.needs_list.pop("created", None)
        self.needs_list["current_version"] = self.current_version
        self.needs_list["project"] = self.project

    def write_json(self, needs_file: str = "needs.json", needs_path: str = "") -> None:
        """Write the collected needs as JSON to ``needs_file``.

        :param needs_file: File name of the output file.
        :param needs_path: Directory for the output file; defaults to the outdir.
        """
        self._finalise()
        needs_dir = needs_path if needs_path else self.outdir
        # JSON is specified as UTF-8; don't rely on the locale default encoding
        with open(os.path.join(needs_dir, needs_file), "w", encoding="utf-8") as f:
            json.dump(self.needs_list, f, sort_keys=True)

    def dump_json(self) -> str:
        """Return the collected needs as a JSON string."""
        self._finalise()
        return json.dumps(self.needs_list, sort_keys=True)

    def load_json(self, file: str) -> None:
        """Load a ``needs.json`` file, replacing the current needs list.

        Relative paths are resolved against the Sphinx confdir.  Missing or
        undecodable files produce a warning instead of raising.
        """
        if not os.path.isabs(file):
            file = os.path.join(self.confdir, file)
        if not os.path.exists(file):
            log_warning(
                self.log, f"Could not load needs json file {file}", "json_load", None
            )
        else:
            errors = check_needs_file(file)
            # We only care for schema errors here, all other possible errors
            # are not important for need-imports.
            if errors.schema:
                self.log.info(f"Schema validation errors detected in file {file}:")
                for error in errors.schema:
                    # error.path may contain integers (array indices), so each
                    # element must be stringified before joining
                    self.log.info(f' {error.message} -> {".".join(map(str, error.path))}')
            with open(file, encoding="utf-8") as needs_file:
                try:
                    needs_list = json.load(needs_file)
                except json.JSONDecodeError:
                    log_warning(
                        self.log,
                        f"Could not decode json file {file}",
                        "json_load",
                        None,
                    )
                else:
                    self.needs_list = needs_list
            self.log.debug(f"needs.json file loaded: {file}")
class Errors:
    """Container for the error reports produced by needs.json checks."""

    def __init__(self, schema_errors: list[Any]):
        # errors collected during JSON schema validation
        self.schema = schema_errors
def check_needs_file(path: str) -> Errors:
    """
    Checks a given json-file, if it passes our needs.json structure tests.

    Current checks:

    * Schema validation

    :param path: File path to a needs.json file
    :return: Errors object with error reports
    :raises SphinxNeedsFileException: if the file is not valid JSON
    """
    # JSON is specified as UTF-8; don't rely on the locale default encoding
    with open(path, encoding="utf-8") as needs_file:
        try:
            data = json.load(needs_file)
        except json.JSONDecodeError as e:
            # chain the original error so the traceback keeps the
            # line/column information from the JSON decoder
            raise SphinxNeedsFileException(
                f'Problems loading json file "{path}". '
                f"Maybe it is empty or has an invalid json format. Original exception: {e}"
            ) from e
    return check_needs_data(data)
@lru_cache
def _load_schema() -> dict[str, Any]:
    """Load (and cache) the bundled ``needsfile.json`` validation schema."""
    here = os.path.dirname(__file__)
    with open(os.path.join(here, "needsfile.json")) as schema_file:
        schema = json.load(schema_file)
    return schema  # type: ignore[no-any-return]
def check_needs_data(data: Any) -> Errors:
    """
    Checks a given json-file, if it passes our needs.json structure tests.

    Current checks:

    * Schema validation

    :param data: Loaded needs.json file
    :return: Errors object with error reports
    """
    validator = Draft7Validator(_load_schema())
    # In future there may be additional types of validations.
    # So lets already use a class for all errors
    return Errors(list(validator.iter_errors(data)))
if __name__ == "__main__":
    # Allows a simple call via CLI::
    #
    #     python needsfile.py docs/needs/needs.json
    #
    # NOTE: the previous guard was `"main" in __name__`, which would also
    # fire for any imported module whose name contains "main".
    try:
        needs_file = sys.argv[1]
    except IndexError:
        needs_file = "needs.json"
    check_needs_file(needs_file)
class SphinxNeedsFileException(Exception):
    """Exception for any file handling problems inside Sphinx-Needs.

    Derives from ``Exception`` (not ``BaseException``) so that generic
    ``except Exception`` handlers and tools that only trap ordinary
    errors can catch it; ``BaseException`` is reserved for exits like
    ``KeyboardInterrupt`` and ``SystemExit``.
    """