-
Notifications
You must be signed in to change notification settings - Fork 2.7k
feat: support for rustdoc mergeable cross-crate info #16309
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: master
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,6 +1,7 @@ | ||
| //! [`BuildRunner`] is the mutable state used during the build process. | ||
|
|
||
| use std::collections::{HashMap, HashSet}; | ||
| use std::ffi::OsStr; | ||
| use std::path::{Path, PathBuf}; | ||
| use std::sync::{Arc, Mutex}; | ||
|
|
||
|
|
@@ -230,6 +231,8 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> { | |
| } | ||
| } | ||
|
|
||
| self.collect_doc_merge_info()?; | ||
|
|
||
| // Collect the result of the build into `self.compilation`. | ||
| for unit in &self.bcx.roots { | ||
| self.collect_tests_and_executables(unit)?; | ||
|
|
@@ -335,6 +338,132 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> { | |
| Ok(()) | ||
| } | ||
|
|
||
| fn collect_doc_merge_info(&mut self) -> CargoResult<()> { | ||
| if !self.bcx.gctx.cli_unstable().rustdoc_mergeable_info { | ||
| return Ok(()); | ||
| } | ||
|
|
||
| if !self.bcx.build_config.intent.is_doc() { | ||
| return Ok(()); | ||
| } | ||
|
|
||
| if self.bcx.build_config.intent.wants_doc_json_output() { | ||
| // rustdoc JSON output doesn't support merge (yet?) | ||
| return Ok(()); | ||
| } | ||
|
|
||
| let mut doc_merge_info = HashMap::new(); | ||
|
|
||
| let unit_iter = if self.bcx.build_config.intent.wants_deps_docs() { | ||
| itertools::Either::Left(self.bcx.unit_graph.keys()) | ||
| } else { | ||
| itertools::Either::Right(self.bcx.roots.iter()) | ||
| }; | ||
|
|
||
| for unit in unit_iter { | ||
| let has_doc_parts = unit.mode.is_doc() | ||
| && self | ||
| .outputs(unit)? | ||
| .iter() | ||
| .any(|o| matches!(o.flavor, FileFlavor::DocParts)); | ||
| if !has_doc_parts { | ||
| continue; | ||
| } | ||
|
|
||
| doc_merge_info.entry(unit.kind).or_insert_with(|| { | ||
| let out_dir = self | ||
| .files() | ||
| .layout(unit.kind) | ||
| .artifact_dir() | ||
| .expect("artifact-dir was not locked") | ||
| .doc() | ||
| .to_owned(); | ||
| let docdeps_dir = self.files().docdeps_dir(unit); | ||
|
|
||
| let mut requires_merge = false; | ||
|
|
||
| // HACK: get mtime of crates.js to inform outside | ||
| // whether we need to merge cross-crate info. | ||
| // The content of `crates.js` looks like | ||
| // | ||
| // ``` | ||
| // window.ALL_CRATES = ["cargo","cargo_util","cargo_util_schemas","crates_io"] | ||
| // ``` | ||
| // | ||
| // and will be updated when any new crate got documented | ||
| // even with the legacy `--merge=shared` mode. | ||
| let crates_js = out_dir.join("crates.js"); | ||
|
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. cc @notriddle I feel pretty bad about this. Perhaps cargo should log the mtime/checksum somewhere instead of checking the mtime of `crates.js` directly.
||
| let crates_js_mtime = paths::mtime(&crates_js); | ||
|
|
||
| let mut num_crates = 0; | ||
|
|
||
| for entry in walkdir::WalkDir::new(docdeps_dir).max_depth(1) { | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Isn't that just a `std::fs::read_dir`?
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Was thinking of this layout #16309 (comment), but then started with this flattened one, hence walkdir. I am fine switching back to std if people think walkdir is not worth it atm.
||
| let Ok(entry) = entry else { | ||
| tracing::debug!("failed to read entry at {}", docdeps_dir.display()); | ||
| continue; | ||
| }; | ||
|
|
||
| if !entry.file_type().is_file() | ||
| || entry.path().extension() != Some(OsStr::new("json")) | ||
| { | ||
| continue; | ||
| } | ||
|
|
||
| num_crates += 1; | ||
|
|
||
| if requires_merge { | ||
| continue; | ||
| } | ||
|
|
||
| let crates_js_mtime = match crates_js_mtime { | ||
| Ok(mtime) => mtime, | ||
| Err(ref err) => { | ||
| tracing::debug!( | ||
| ?err, | ||
| "failed to read mtime of {}", | ||
| crates_js.display() | ||
| ); | ||
| requires_merge = true; | ||
| continue; | ||
| } | ||
| }; | ||
|
|
||
| let parts_mtime = match paths::mtime(entry.path()) { | ||
| Ok(mtime) => mtime, | ||
| Err(err) => { | ||
| tracing::debug!( | ||
| ?err, | ||
| "failed to read mtime of {}", | ||
| entry.path().display() | ||
| ); | ||
| requires_merge = true; | ||
| continue; | ||
| } | ||
| }; | ||
|
|
||
| if parts_mtime > crates_js_mtime { | ||
| requires_merge = true; | ||
| continue; | ||
| } | ||
| } | ||
|
|
||
| if requires_merge { | ||
| compilation::DocMergeInfo::Merge { | ||
| num_crates, | ||
| parts_dir: docdeps_dir.to_owned(), | ||
| out_dir, | ||
| } | ||
| } else { | ||
| compilation::DocMergeInfo::Fresh | ||
| } | ||
| }); | ||
| } | ||
|
|
||
| self.compilation.doc_merge_info = doc_merge_info; | ||
|
|
||
| Ok(()) | ||
| } | ||
|
|
||
| /// Returns the executable for the specified unit (if any). | ||
| pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> { | ||
| let is_binary = unit.target.is_executable(); | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -106,6 +106,11 @@ pub struct Compilation<'gctx> { | |
| /// Libraries to test with rustdoc. | ||
| pub to_doc_test: Vec<Doctest>, | ||
|
|
||
| /// Compilation information for running `rustdoc --merge=finalize`. | ||
| /// | ||
| /// See `-Zrustdoc-mergeable-info` for more. | ||
| pub doc_merge_info: HashMap<CompileKind, DocMergeInfo>, | ||
|
|
||
| /// The target host triple. | ||
| pub host: String, | ||
|
|
||
|
|
@@ -143,6 +148,7 @@ impl<'gctx> Compilation<'gctx> { | |
| root_crate_names: Vec::new(), | ||
| extra_env: HashMap::new(), | ||
| to_doc_test: Vec::new(), | ||
| doc_merge_info: Default::default(), | ||
| gctx: bcx.gctx, | ||
| host: bcx.host_triple().to_string(), | ||
| rustc_process, | ||
|
|
@@ -383,6 +389,25 @@ impl<'gctx> Compilation<'gctx> { | |
| } | ||
| } | ||
|
|
||
| /// Compilation information for running `rustdoc --merge=finalize`. | ||
| #[derive(Default)] | ||
| pub enum DocMergeInfo { | ||
| /// Doc merge disabled. | ||
| #[default] | ||
| None, | ||
|
Comment on lines
+395
to
+397
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Or rename to |
||
| /// Nothing is stale. | ||
| Fresh, | ||
| /// Doc merge is required | ||
| Merge { | ||
| /// Number of crates to merge. | ||
| num_crates: u64, | ||
| /// Output directory holding every cross-crate info JSON file. | ||
| parts_dir: PathBuf, | ||
| /// Output directory for rustdoc. | ||
| out_dir: PathBuf, | ||
| }, | ||
| } | ||
|
|
||
| /// Prepares a `rustc_tool` process with additional environment variables | ||
| /// that are only relevant in a context that has a unit | ||
| fn fill_rustc_tool_env(mut cmd: ProcessBuilder, unit: &Unit) -> ProcessBuilder { | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
`FileFlavor::DocParts` is for filtering purposes, though I am not sure what our design principle is for adding a new `FileFlavor`.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Side note: it looks like we repeat this multiple times; I feel like we should define what this is supposed to mean and turn it into a function.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Digging around the use of `FileFlavor`, it sounds like `Normal` is for final artifacts (bins, cdylibs, etc), `Linkable` is for rlibs, and the rest are different types of supporting file types. I guess it makes sense to have a new type here.