
Commit 1d7cc6d

Merge pull request #194 from timvink/update
Update to new plugin system
2 parents: f62d9e6 + 7415852 · commit 1d7cc6d

File tree: 1 file changed, +60 -9 lines
  • src/mkdocs_git_revision_date_localized_plugin/plugin.py


src/mkdocs_git_revision_date_localized_plugin/plugin.py

Lines changed: 60 additions & 9 deletions
@@ -9,7 +9,6 @@
 import re
 import os
 import time
-import multiprocessing
 from pathlib import Path
 
 from mkdocs import __version__ as mkdocs_version
@@ -58,6 +57,29 @@ def __init__(self):
         super().__init__()
         self.last_revision_commits = {}
         self.created_commits = {}
+        self.is_serve_dirty_build = False
+
+    def on_startup(self, *, command: str, dirty: bool) -> None:
+        """
+        Run on startup.
+
+        Note that "The presence of an on_startup method (even if empty)
+        migrates the plugin to the new system where the plugin object is
+        kept across builds within one mkdocs serve."
+
+        Args:
+            command (str): The mkdocs command being run.
+            dirty (bool): Whether the build is dirty.
+        """
+        # Track if this is an incremental rebuild during mkdocs serve
+        # dirty=True means it's a rebuild triggered by file changes
+        # dirty=False means it's a clean/initial build
+        self.is_serve_dirty_build = dirty
+
+        # Clear cache on clean builds to ensure fresh data
+        if not dirty:
+            self.last_revision_commits = {}
+            self.created_commits = {}
 
     def on_config(self, config: config_options.Config, **kwargs) -> Dict[str, Any]:
         """
@@ -153,6 +175,7 @@ def on_config(self, config: config_options.Config, **kwargs) -> Dict[str, Any]:
         return config
 
     def parallel_compute_commit_timestamps(self, files, original_source: Optional[Dict] = None, is_first_commit=False):
+        import multiprocessing
         pool = multiprocessing.Pool(processes=min(10, multiprocessing.cpu_count()))
         results = []
         for f in files:
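
Aside on the hunk above: moving import multiprocessing inside parallel_compute_commit_timestamps defers the import until parallel processing is actually used. A minimal sketch of the lazy-import plus apply_async/close/join pattern the method follows; compute_in_parallel, worker, and paths are placeholder names, not part of the plugin:

    def compute_in_parallel(paths, worker):
        import multiprocessing  # imported only when parallel processing runs

        pool = multiprocessing.Pool(processes=min(10, multiprocessing.cpu_count()))
        pending = [(path, pool.apply_async(worker, args=(path,))) for path in paths]
        pool.close()  # no further tasks will be submitted
        pool.join()   # wait for all workers to finish
        return {path: result.get() for path, result in pending}
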
@@ -165,15 +188,20 @@ def parallel_compute_commit_timestamps(self, files, original_source: Optional[Di
             elif exclude(f.src_path, self.config.get("exclude", [])):
                 continue
             else:
+                temp_abs_src_path = str(Path(f.abs_src_path).absolute())
                 abs_src_path = f.abs_src_path
+
                 # Support plugins like monorepo that might have moved the files from the original source that is under git
                 if original_source and abs_src_path in original_source:
                     abs_src_path = original_source[abs_src_path]
 
                 assert Path(abs_src_path).exists()
                 abs_src_path = str(Path(abs_src_path).absolute())
                 result = pool.apply_async(self.util.get_git_commit_timestamp, args=(abs_src_path, is_first_commit))
+                # Store both the original path and temp path (if different) so cache lookups work either way
                 results.append((abs_src_path, result))
+                if temp_abs_src_path != abs_src_path:
+                    results.append((temp_abs_src_path, result))
         pool.close()
         pool.join()
         if is_first_commit:
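
About the hunk above: each result is stored under both the normalized original path and the temporary copy's path (when a plugin such as monorepo or techdocs has relocated the file), so a later cache lookup succeeds no matter which of the two paths the page reports. A small sketch of the idea, with invented paths and values:

    from pathlib import Path

    cache = {}
    timestamp_result = ("abc1234", 1700000000)  # invented (hash, timestamp) pair

    git_path = str(Path("/repo/docs/index.md").absolute())           # path under git
    temp_path = str(Path("/tmp/techdocs/docs/index.md").absolute())  # relocated copy

    # Keyed on both normalized paths, so a lookup from either location hits the cache
    cache[git_path] = timestamp_result
    if temp_path != git_path:
        cache[temp_path] = timestamp_result
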
@@ -190,6 +218,13 @@ def on_files(self, files: Files, config: MkDocsConfig):
         if not self.config.get("enabled") or not self.config.get("enable_parallel_processing"):
             return
 
+        # Skip parallel processing on incremental rebuilds (dirty builds during mkdocs serve)
+        # This avoids the overhead of creating a new multiprocessing pool on every file save
+        # The cache from the initial build will be reused
+        if self.is_serve_dirty_build:
+            logging.debug("[git-revision-date-localized] Skipping parallel processing on incremental rebuild, using cache")
+            return
+
         # Support monorepo/techdocs, which copies the docs_dir to a temporary directory
         mono_repo_plugin = config.get("plugins", {}).get("monorepo", None)
         if mono_repo_plugin is not None and hasattr(mono_repo_plugin, "merger") and mono_repo_plugin.merger is not None:
@@ -261,10 +296,18 @@ def on_page_markdown(self, markdown: str, page: Page, config: config_options.Con
         if getattr(page.file, "generated_by", None):
             last_revision_hash, last_revision_timestamp = "", int(time.time())
         else:
-            last_revision_hash, last_revision_timestamp = self.last_revision_commits.get(
-                str(Path(page.file.abs_src_path).absolute()), (None, None)
-            )
-            if last_revision_timestamp is None:
+            # Use cached results if parallel processing is enabled and cache is populated
+            if self.config.get("enable_parallel_processing") and self.last_revision_commits:
+                last_revision_hash, last_revision_timestamp = self.last_revision_commits.get(
+                    str(Path(page.file.abs_src_path).absolute()), (None, None)
+                )
+                if last_revision_timestamp is None:
+                    last_revision_hash, last_revision_timestamp = self.util.get_git_commit_timestamp(
+                        path=page.file.abs_src_path,
+                        is_first_commit=False,
+                    )
+            else:
+                # Directly call git if parallel processing is disabled or cache is empty
                 last_revision_hash, last_revision_timestamp = self.util.get_git_commit_timestamp(
                     path=page.file.abs_src_path,
                     is_first_commit=False,
@@ -337,10 +380,18 @@ def on_page_markdown(self, markdown: str, page: Page, config: config_options.Con
         if getattr(page.file, "generated_by", None):
             first_revision_hash, first_revision_timestamp = "", int(time.time())
         else:
-            first_revision_hash, first_revision_timestamp = self.created_commits.get(
-                str(Path(page.file.abs_src_path).absolute()), (None, None)
-            )
-            if first_revision_timestamp is None:
+            # Use cached results if parallel processing is enabled and cache is populated
+            if self.config.get("enable_creation_date") and self.config.get("enable_parallel_processing") and self.created_commits:
+                first_revision_hash, first_revision_timestamp = self.created_commits.get(
+                    str(Path(page.file.abs_src_path).absolute()), (None, None)
+                )
+                if first_revision_timestamp is None:
+                    first_revision_hash, first_revision_timestamp = self.util.get_git_commit_timestamp(
+                        path=page.file.abs_src_path,
+                        is_first_commit=True,
+                    )
+            else:
+                # Directly call git if parallel processing is disabled or cache is empty
                 first_revision_hash, first_revision_timestamp = self.util.get_git_commit_timestamp(
                     path=page.file.abs_src_path,
                     is_first_commit=True,
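
Note on the last two hunks: both apply the same pattern to the last-revision and creation-date caches, trying the in-memory cache first and falling back to calling git for pages that were never cached, for example files added after the initial mkdocs serve build (the creation-date branch additionally checks enable_creation_date). Condensed into a hypothetical helper, not part of this commit, the shared logic is roughly:

    def lookup_or_git(self, cache, abs_src_path, is_first_commit):
        # Hypothetical helper mirroring the cache-then-git fallback above
        if self.config.get("enable_parallel_processing") and cache:
            commit_hash, timestamp = cache.get(
                str(Path(abs_src_path).absolute()), (None, None)
            )
            if timestamp is not None:
                return commit_hash, timestamp
        # Cache disabled, empty, or missed: ask git directly
        return self.util.get_git_commit_timestamp(
            path=abs_src_path, is_first_commit=is_first_commit
        )
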
