Auto merge of #9525 - willcrichton:example-analyzer, r=alexcrichton
Scrape code examples from examples/ directory for Rustdoc

Adds support for the functionality described in rust-lang/rfcs#3123

Matching changes to rustdoc are here: rust-lang/rust#85833
bors committed Oct 28, 2021
2 parents 6c1bc24 + 33718c7 commit 0a98b1d
Showing 16 changed files with 435 additions and 28 deletions.
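For orientation before reading the diff: judging from the flag definition and the fail_if_stable_opt gate added in src/bin/cargo/commands/doc.rs below, the feature would be exercised with something like cargo +nightly doc -Z unstable-options --scrape-examples=examples (or --scrape-examples=all). That invocation is an inference from this diff, not something stated in the commit message.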
37 changes: 36 additions & 1 deletion src/bin/cargo/commands/doc.rs
@@ -1,6 +1,7 @@
use crate::command_prelude::*;

use cargo::ops::{self, DocOptions};
use anyhow::anyhow;
use cargo::ops::{self, CompileFilter, DocOptions, FilterRule, LibRule};

pub fn cli() -> App {
subcommand("doc")
@@ -19,6 +20,13 @@ pub fn cli() -> App {
)
.arg(opt("no-deps", "Don't build documentation for dependencies"))
.arg(opt("document-private-items", "Document private items"))
.arg(
opt(
"scrape-examples",
"Scrape examples to include as function documentation",
)
.value_name("FLAGS"),
)
.arg_jobs()
.arg_targets_lib_bin_example(
"Document only this package's library",
@@ -48,6 +56,33 @@ pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {
args.compile_options(config, mode, Some(&ws), ProfileChecking::Custom)?;
compile_opts.rustdoc_document_private_items = args.is_present("document-private-items");

// TODO(wcrichto): move scrape example configuration into Cargo.toml before stabilization
// See: https://github.com/rust-lang/cargo/pull/9525#discussion_r728470927
compile_opts.rustdoc_scrape_examples = match args.value_of("scrape-examples") {
Some(s) => Some(match s {
"all" => CompileFilter::new_all_targets(),
"examples" => CompileFilter::new(
LibRule::False,
FilterRule::none(),
FilterRule::none(),
FilterRule::All,
FilterRule::none(),
),
_ => {
return Err(CliError::from(anyhow!(
r#"--scrape-examples must take "all" or "examples" as an argument"#
)));
}
}),
None => None,
};

if compile_opts.rustdoc_scrape_examples.is_some() {
config
.cli_unstable()
.fail_if_stable_opt("--scrape-examples", 9910)?;
}

let doc_opts = DocOptions {
open_result: args.is_present("open"),
compile_opts,
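The exec() hunk above maps the flag value onto cargo's CompileFilter. Below is a minimal, self-contained sketch of just the value-parsing rule, using a stand-in enum instead of CompileFilter (every name in the sketch is illustrative, not cargo's API):

#[derive(Debug, PartialEq)]
enum ScrapeFilter {
    AllTargets,   // corresponds to --scrape-examples=all
    ExamplesOnly, // corresponds to --scrape-examples=examples
}

fn parse_scrape_examples(value: Option<&str>) -> Result<Option<ScrapeFilter>, String> {
    match value {
        None => Ok(None),
        Some("all") => Ok(Some(ScrapeFilter::AllTargets)),
        Some("examples") => Ok(Some(ScrapeFilter::ExamplesOnly)),
        Some(other) => Err(format!(
            r#"--scrape-examples must take "all" or "examples" as an argument, got {:?}"#,
            other
        )),
    }
}

fn main() {
    assert_eq!(
        parse_scrape_examples(Some("all")),
        Ok(Some(ScrapeFilter::AllTargets))
    );
    assert_eq!(parse_scrape_examples(None), Ok(None));
    assert!(parse_scrape_examples(Some("bins")).is_err());
}

The real code additionally rejects the flag on stable toolchains via fail_if_stable_opt, pointing users at tracking issue 9910.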
8 changes: 8 additions & 0 deletions src/cargo/core/compiler/build_config.rs
@@ -149,6 +149,8 @@ pub enum CompileMode {
Doc { deps: bool },
/// A target that will be tested with `rustdoc`.
Doctest,
/// An example or library that will be scraped for function calls by `rustdoc`.
Docscrape,
/// A marker for Units that represent the execution of a `build.rs` script.
RunCustomBuild,
}
@@ -166,6 +168,7 @@ impl ser::Serialize for CompileMode {
Bench => "bench".serialize(s),
Doc { .. } => "doc".serialize(s),
Doctest => "doctest".serialize(s),
Docscrape => "docscrape".serialize(s),
RunCustomBuild => "run-custom-build".serialize(s),
}
}
@@ -187,6 +190,11 @@ impl CompileMode {
self == CompileMode::Doctest
}

/// Returns `true` if this is scraping examples for documentation.
pub fn is_doc_scrape(self) -> bool {
self == CompileMode::Docscrape
}

/// Returns `true` if this is any type of test (test, benchmark, doc test, or
/// check test).
pub fn is_any_test(self) -> bool {
5 changes: 5 additions & 0 deletions src/cargo/core/compiler/build_context/mod.rs
@@ -47,6 +47,9 @@ pub struct BuildContext<'a, 'cfg> {
/// The dependency graph of units to compile.
pub unit_graph: UnitGraph,

/// Reverse-dependencies of documented units, used by the rustdoc --scrape-examples flag.
pub scrape_units: Vec<Unit>,

/// The list of all kinds that are involved in this build
pub all_kinds: HashSet<CompileKind>,
}
@@ -61,6 +64,7 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
target_data: RustcTargetData<'cfg>,
roots: Vec<Unit>,
unit_graph: UnitGraph,
scrape_units: Vec<Unit>,
) -> CargoResult<BuildContext<'a, 'cfg>> {
let all_kinds = unit_graph
.keys()
@@ -79,6 +83,7 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
target_data,
roots,
unit_graph,
scrape_units,
all_kinds,
})
}
5 changes: 4 additions & 1 deletion src/cargo/core/compiler/build_context/target_info.rs
@@ -452,7 +452,10 @@ impl TargetInfo {
}
}
CompileMode::Check { .. } => Ok((vec![FileType::new_rmeta()], Vec::new())),
CompileMode::Doc { .. } | CompileMode::Doctest | CompileMode::RunCustomBuild => {
CompileMode::Doc { .. }
| CompileMode::Doctest
| CompileMode::Docscrape
| CompileMode::RunCustomBuild => {
panic!("asked for rustc output for non-rustc mode")
}
}
15 changes: 14 additions & 1 deletion src/cargo/core/compiler/context/compilation_files.rs
@@ -191,7 +191,9 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
/// Returns the directory where the artifacts for the given unit are
/// initially created.
pub fn out_dir(&self, unit: &Unit) -> PathBuf {
if unit.mode.is_doc() {
// Docscrape units need to have doc/ set as the out_dir so sources for reverse-dependencies
// will be put into doc/ and not into deps/ where the *.examples files are stored.
if unit.mode.is_doc() || unit.mode.is_doc_scrape() {
self.layout(unit.kind).doc().to_path_buf()
} else if unit.mode.is_doc_test() {
panic!("doc tests do not have an out dir");
@@ -417,6 +419,17 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
// but Cargo does not know about that.
vec![]
}
CompileMode::Docscrape => {
let path = self
.deps_dir(unit)
.join(format!("{}.examples", unit.buildkey()));
vec![OutputFile {
path,
hardlink: None,
export_path: None,
flavor: FileFlavor::Normal,
}]
}
CompileMode::Test
| CompileMode::Build
| CompileMode::Bench
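Taken together with the out_dir change above, the expected on-disk effect (an inference from deps_dir and out_dir, assuming the default target directory layout) is that rendered documentation keeps going to target/doc/, while each Docscrape unit writes a single <buildkey>.examples file into the deps directory, e.g. something like target/debug/deps/foo-<hash>.examples for a hypothetical crate foo.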
42 changes: 42 additions & 0 deletions src/cargo/core/compiler/context/mod.rs
@@ -80,6 +80,10 @@ pub struct Context<'a, 'cfg> {
/// compilation is happening (only object, only bitcode, both, etc), and is
/// precalculated early on.
pub lto: HashMap<Unit, Lto>,

/// Map of Doc/Docscrape units to metadata for their -Cmetadata flag.
/// See Context::find_metadata_units for more details.
pub metadata_for_doc_units: HashMap<Unit, Metadata>,
}

impl<'a, 'cfg> Context<'a, 'cfg> {
@@ -120,6 +124,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
rustc_clients: HashMap::new(),
pipelining,
lto: HashMap::new(),
metadata_for_doc_units: HashMap::new(),
})
}

@@ -134,6 +139,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
self.prepare()?;
custom_build::build_map(&mut self)?;
self.check_collisions()?;
self.compute_metadata_for_doc_units();

// We need to make sure that if there were any previous docs
// already compiled, they were compiled with the same Rustc version that we're currently
@@ -620,4 +626,40 @@ impl<'a, 'cfg> Context<'a, 'cfg> {

Ok(client)
}

/// Finds metadata for Doc/Docscrape units.
///
/// rustdoc needs a -Cmetadata flag in order to recognize StableCrateIds that refer to
/// items in the crate being documented. The -Cmetadata flag used by reverse-dependencies
/// will be the metadata of the Cargo unit that generated the current library's rmeta file,
/// which should be a Check unit.
///
/// If the current crate has reverse-dependencies, such a Check unit should exist, and so
/// we use that crate's metadata. If not, we use the crate's Doc unit so at least examples
/// scraped from the current crate can be used when documenting the current crate.
pub fn compute_metadata_for_doc_units(&mut self) {
for unit in self.bcx.unit_graph.keys() {
if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
continue;
}

let matching_units = self
.bcx
.unit_graph
.keys()
.filter(|other| {
unit.pkg == other.pkg
&& unit.target == other.target
&& !other.mode.is_doc_scrape()
})
.collect::<Vec<_>>();
let metadata_unit = matching_units
.iter()
.find(|other| other.mode.is_check())
.or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
.unwrap_or(&unit);
self.metadata_for_doc_units
.insert(unit.clone(), self.files().metadata(metadata_unit));
}
}
}
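To see the precedence implemented by compute_metadata_for_doc_units in isolation: prefer a Check unit for the same package and target, fall back to the Doc unit, and finally to the unit itself. The sketch below mirrors that fallback with stand-in types (Unit, Mode, and metadata_source here are illustrative, not cargo's real types):

#[derive(Clone, Copy, Debug, PartialEq)]
enum Mode {
    Check,
    Doc,
    Docscrape,
}

#[derive(Clone, Copy, Debug)]
struct Unit {
    id: u32,
    mode: Mode,
}

// Mirrors the find-Check, else-Doc, else-self fallback used above.
fn metadata_source<'a>(unit: &'a Unit, same_target_units: &'a [Unit]) -> &'a Unit {
    same_target_units
        .iter()
        .find(|u| u.mode == Mode::Check)
        .or_else(|| same_target_units.iter().find(|u| u.mode == Mode::Doc))
        .unwrap_or(unit)
}

fn main() {
    let doc = Unit { id: 1, mode: Mode::Doc };
    let check = Unit { id: 2, mode: Mode::Check };
    let scrape = Unit { id: 3, mode: Mode::Docscrape };

    // With a Check unit for the same target, its -Cmetadata value is reused.
    assert_eq!(metadata_source(&scrape, &[doc, check]).id, 2);
    // Otherwise the Doc unit is used; with no match, the unit falls back to itself.
    assert_eq!(metadata_source(&scrape, &[doc]).id, 1);
    assert_eq!(metadata_source(&scrape, &[]).id, 3);
}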
38 changes: 37 additions & 1 deletion src/cargo/core/compiler/mod.rs
@@ -165,7 +165,7 @@ fn compile<'cfg>(
let force = exec.force_rebuild(unit) || force_rebuild;
let mut job = fingerprint::prepare_target(cx, unit, force)?;
job.before(if job.freshness() == Freshness::Dirty {
let work = if unit.mode.is_doc() {
let work = if unit.mode.is_doc() || unit.mode.is_doc_scrape() {
rustdoc(cx, unit)?
} else {
rustc(cx, unit, exec)?
@@ -647,6 +647,42 @@ fn rustdoc(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Work> {
rustdoc.args(args);
}

let metadata = cx.metadata_for_doc_units[&unit];
rustdoc.arg("-C").arg(format!("metadata={}", metadata));

let scrape_output_path = |unit: &Unit| -> CargoResult<PathBuf> {
let output_dir = cx.files().deps_dir(unit);
Ok(output_dir.join(format!("{}.examples", unit.buildkey())))
};

if unit.mode.is_doc_scrape() {
debug_assert!(cx.bcx.scrape_units.contains(unit));

rustdoc.arg("-Zunstable-options");

rustdoc
.arg("--scrape-examples-output-path")
.arg(scrape_output_path(unit)?);

// Only scrape examples for items from crates in the workspace, to reduce generated file size
for pkg in cx.bcx.ws.members() {
rustdoc
.arg("--scrape-examples-target-crate")
.arg(pkg.name());
}
} else if cx.bcx.scrape_units.len() > 0 && cx.bcx.ws.is_member(&unit.pkg) {
// We only pass scraped examples to packages in the workspace
// since examples are only coming from reverse-dependencies of workspace packages

rustdoc.arg("-Zunstable-options");

for scrape_unit in &cx.bcx.scrape_units {
rustdoc
.arg("--with-examples")
.arg(scrape_output_path(scrape_unit)?);
}
}

build_deps_args(&mut rustdoc, cx, unit)?;
rustdoc::add_root_urls(cx, unit, &mut rustdoc)?;

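Reading the two branches above together (an approximation inferred from the arguments being assembled, not from captured rustdoc output): a Docscrape unit runs rustdoc with -Zunstable-options, a --scrape-examples-output-path pointing at its <buildkey>.examples file, and one --scrape-examples-target-crate per workspace member; a workspace member's ordinary Doc unit instead gets -Zunstable-options plus one --with-examples argument per scrape unit, which is how the scraped call sites reach the rendered documentation.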
1 change: 1 addition & 0 deletions src/cargo/core/compiler/timings.rs
@@ -176,6 +176,7 @@ impl<'cfg> Timings<'cfg> {
CompileMode::Bench => target.push_str(" (bench)"),
CompileMode::Doc { .. } => target.push_str(" (doc)"),
CompileMode::Doctest => target.push_str(" (doc test)"),
CompileMode::Docscrape => target.push_str(" (doc scrape)"),
CompileMode::RunCustomBuild => target.push_str(" (run)"),
}
let unit_time = UnitTime {
33 changes: 32 additions & 1 deletion src/cargo/core/compiler/unit_dependencies.rs
@@ -47,6 +47,7 @@ struct State<'a, 'cfg> {
target_data: &'a RustcTargetData<'cfg>,
profiles: &'a Profiles,
interner: &'a UnitInterner,
scrape_units: &'a [Unit],

/// A set of edges in `unit_dependencies` where (a, b) means that the
/// dependency from a to b was added purely because it was a dev-dependency.
@@ -61,6 +62,7 @@ pub fn build_unit_dependencies<'a, 'cfg>(
features: &'a ResolvedFeatures,
std_resolve: Option<&'a (Resolve, ResolvedFeatures)>,
roots: &[Unit],
scrape_units: &[Unit],
std_roots: &HashMap<CompileKind, Vec<Unit>>,
global_mode: CompileMode,
target_data: &'a RustcTargetData<'cfg>,
@@ -91,6 +93,7 @@ pub fn build_unit_dependencies<'a, 'cfg>(
target_data,
profiles,
interner,
scrape_units,
dev_dependency_edges: HashSet::new(),
};

@@ -253,6 +256,7 @@ fn compute_deps(
if !dep.is_transitive()
&& !unit.target.is_test()
&& !unit.target.is_example()
&& !unit.mode.is_doc_scrape()
&& !unit.mode.is_any_test()
{
return false;
@@ -467,6 +471,25 @@ fn compute_deps_doc(
if unit.target.is_bin() || unit.target.is_example() {
ret.extend(maybe_lib(unit, state, unit_for)?);
}

// Add all units being scraped for examples as a dependency of Doc units.
if state.ws.is_member(&unit.pkg) {
for scrape_unit in state.scrape_units.iter() {
// This needs to match the FeaturesFor used in cargo_compile::generate_targets.
let unit_for = UnitFor::new_host(scrape_unit.target.proc_macro());
deps_of(scrape_unit, state, unit_for)?;
ret.push(new_unit_dep(
state,
scrape_unit,
&scrape_unit.pkg,
&scrape_unit.target,
unit_for,
scrape_unit.kind,
scrape_unit.mode,
)?);
}
}

Ok(ret)
}

@@ -558,7 +581,7 @@ fn dep_build_script(
/// Choose the correct mode for dependencies.
fn check_or_build_mode(mode: CompileMode, target: &Target) -> CompileMode {
match mode {
CompileMode::Check { .. } | CompileMode::Doc { .. } => {
CompileMode::Check { .. } | CompileMode::Doc { .. } | CompileMode::Docscrape => {
if target.for_host() {
// Plugin and proc macro targets should be compiled like
// normal.
@@ -695,6 +718,14 @@ fn connect_run_custom_build_deps(state: &mut State<'_, '_>) {
&& other.unit.target.is_linkable()
&& other.unit.pkg.manifest().links().is_some()
})
// Avoid cycles when using the doc --scrape-examples feature:
// Say a workspace has crates A and B where A has a build-dependency on B.
// The Doc units for A and B will have a dependency on the Docscrape for both A and B.
// So this would add a dependency from B-build to A-build, causing a cycle:
// B (build) -> A (build) -> B (build)
// See the test scrape_examples_avoid_build_script_cycle for a concrete example.
// To avoid this cycle, we filter out the B -> A (docscrape) dependency.
.filter(|(_parent, other)| !other.unit.mode.is_doc_scrape())
// Skip dependencies induced via dev-dependencies since
// connections between `links` and build scripts only happens
// via normal dependencies. Otherwise since dev-dependencies can
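A concrete (inferred) picture of the resulting unit graph: for a workspace library foo with an example ex1 (hypothetical names), documenting with --scrape-examples=examples would be expected to give each workspace Doc unit a dependency on the Docscrape unit for ex1 (and, with =all, on Docscrape units for the other targets as well), while the filter added in connect_run_custom_build_deps ignores Docscrape parents when wiring `links` build-script dependencies, so the A/B build-dependency scenario described in the comment above cannot turn into a cycle.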
4 changes: 3 additions & 1 deletion src/cargo/core/profiles.rs
@@ -323,7 +323,9 @@ impl Profiles {
(InternedString::new("dev"), None)
}
}
CompileMode::Doc { .. } => (InternedString::new("doc"), None),
CompileMode::Doc { .. } | CompileMode::Docscrape => {
(InternedString::new("doc"), None)
}
}
} else {
(self.requested_profile, None)
