diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs
index b14bb5d690f..981afe23580 100644
--- a/src/cargo/ops/cargo_compile.rs
+++ b/src/cargo/ops/cargo_compile.rs
@@ -507,7 +507,7 @@ pub fn create_bcx<'a, 'cfg>(
     // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain
     // what heuristics to use in that case.
     if build_config.mode == (CompileMode::Doc { deps: true }) {
-        remove_duplicate_doc(build_config, &mut unit_graph);
+        remove_duplicate_doc(build_config, &units, &mut unit_graph);
     }
 
     if build_config
@@ -1508,14 +1508,11 @@ fn opt_patterns_and_names(
 /// - Different sources. See `collision_doc_sources` test.
 ///
 /// Ideally this would not be necessary.
-fn remove_duplicate_doc(build_config: &BuildConfig, unit_graph: &mut UnitGraph) {
-    // NOTE: There is some risk that this can introduce problems because it
-    // may create orphans in the unit graph (parts of the tree get detached
-    // from the roots). I currently can't think of any ways this will cause a
-    // problem because all other parts of Cargo traverse the graph starting
-    // from the roots. Perhaps this should scan for detached units and remove
-    // them too?
-    //
+fn remove_duplicate_doc(
+    build_config: &BuildConfig,
+    root_units: &[Unit],
+    unit_graph: &mut UnitGraph,
+) {
     // First, create a mapping of crate_name -> Unit so we can see where the
     // duplicates are.
     let mut all_docs: HashMap<String, Vec<Unit>> = HashMap::new();
@@ -1601,4 +1598,18 @@ fn remove_duplicate_doc(build_config: &BuildConfig, unit_graph: &mut UnitGraph)
     for unit_deps in unit_graph.values_mut() {
         unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit));
     }
+    // Remove any orphan units that were detached from the graph.
+    let mut visited = HashSet::new();
+    fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet<Unit>) {
+        if !visited.insert(unit.clone()) {
+            return;
+        }
+        for dep in &graph[unit] {
+            visit(&dep.unit, graph, visited);
+        }
+    }
+    for unit in root_units {
+        visit(unit, unit_graph, &mut visited);
+    }
+    unit_graph.retain(|unit, _| visited.contains(unit));
 }
diff --git a/tests/testsuite/collisions.rs b/tests/testsuite/collisions.rs
index 4315498169a..d7deebc175c 100644
--- a/tests/testsuite/collisions.rs
+++ b/tests/testsuite/collisions.rs
@@ -3,9 +3,8 @@
 //! Ideally these should never happen, but I don't think we'll ever be able to
 //! prevent all collisions.
 
-use cargo_test_support::basic_manifest;
-use cargo_test_support::project;
 use cargo_test_support::registry::Package;
+use cargo_test_support::{basic_manifest, cross_compile, project};
 use std::env;
 
 #[cargo_test]
@@ -431,3 +430,51 @@ the same path; see <https://github.com/rust-lang/cargo/issues/6313>.
         )
         .run();
 }
+
+#[cargo_test]
+fn collision_doc_target() {
+    // collision in doc with --target, doesn't fail due to orphans
+    if cross_compile::disabled() {
+        return;
+    }
+
+    Package::new("orphaned", "1.0.0").publish();
+    Package::new("bar", "1.0.0")
+        .dep("orphaned", "1.0")
+        .publish();
+    Package::new("bar", "2.0.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            bar2 = { version = "2.0", package="bar" }
+            bar = "1.0"
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("doc --target")
+        .arg(cross_compile::alternate())
+        .with_stderr_unordered(
+            "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] orphaned v1.0.0 [..]
+[DOWNLOADED] bar v2.0.0 [..]
+[DOWNLOADED] bar v1.0.0 [..]
+[CHECKING] orphaned v1.0.0
+[DOCUMENTING] bar v2.0.0
+[CHECKING] bar v2.0.0
+[CHECKING] bar v1.0.0
+[DOCUMENTING] foo v0.1.0 [..]
+[FINISHED] [..]
+",
+        )
+        .run();
+}