From cdc69166ba15213d4e03bb85fc101b36d55d69a0 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Thu, 16 Oct 2014 11:19:28 -0700 Subject: [PATCH 1/9] Update semver to have `^` be the default operator --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 010c4bce5b5..4dd9d66df1a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8,7 +8,7 @@ dependencies = [ "git2 0.0.1 (git+https://github.com/alexcrichton/git2-rs#c01b0b279470552c48cf7f902ae5baf132df0a6d)", "glob 0.0.1 (git+https://github.com/rust-lang/glob#469a6bc1a0fc289ab220170e691cffbc01dcf1e6)", "hamcrest 0.1.0 (git+https://github.com/carllerche/hamcrest-rust.git#7d46e76514ac606530dfb0e93270fffcf64ca76d)", - "semver 0.1.0 (git+https://github.com/rust-lang/semver#5dee918159d4e3d84c1ee54cf6a5305e6f7bd557)", + "semver 0.1.0 (git+https://github.com/rust-lang/semver#9bb8265ea6cf01eddfa7dc5ec9334883443e9fc7)", "tar 0.0.1 (git+https://github.com/alexcrichton/tar-rs#943d7c0173c7fa5e8c58978add0180569c68d7f7)", "toml 0.1.0 (git+https://github.com/alexcrichton/toml-rs#8a3ba4c65cfa22a3d924293a1fb3a70bfac5e66c)", "url 0.1.0 (git+https://github.com/servo/rust-url#7f1991d847fb8cf8648def2ff121bae90b89131f)", @@ -91,7 +91,7 @@ source = "git+https://github.com/alexcrichton/openssl-static-sys#d218fa63aabefb3 [[package]] name = "semver" version = "0.1.0" -source = "git+https://github.com/rust-lang/semver#5dee918159d4e3d84c1ee54cf6a5305e6f7bd557" +source = "git+https://github.com/rust-lang/semver#9bb8265ea6cf01eddfa7dc5ec9334883443e9fc7" [[package]] name = "tar" From d51fe548dd9a36994a19f455e250d6b199b2e788 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Thu, 16 Oct 2014 11:25:05 -0700 Subject: [PATCH 2/9] Split out encoding from `core::resolver` to a submodule Same exported hierarchy, just some internal organizational changes.
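As background for patch 1: making `^` the default means a bare requirement such as "1.0.2" is now read as a caret requirement, matching any version whose left-most non-zero component agrees. This is the same rule the resolver's `compatible` helper implements in patch 3 below. The sketch that follows is illustrative only, written in current Rust syntax rather than the 2014 dialect used in these diffs; the `Version` struct and `caret_compatible` function are ad-hoc names here, not the semver crate's API.

// Minimal sketch of the compatibility rule a caret requirement encodes.
struct Version { major: u64, minor: u64, patch: u64 }

fn caret_compatible(a: &Version, b: &Version) -> bool {
    // Versions are compatible when their left-most non-zero component matches.
    if a.major != b.major { return false }
    if a.major != 0 { return true }
    if a.minor != b.minor { return false }
    if a.minor != 0 { return true }
    a.patch == b.patch
}

fn main() {
    let v102 = Version { major: 1, minor: 0, patch: 2 };
    let v110 = Version { major: 1, minor: 1, patch: 0 };
    let v010 = Version { major: 0, minor: 1, patch: 0 };
    let v020 = Version { major: 0, minor: 2, patch: 0 };
    // A bare "1.0.2" now behaves like ^1.0.2, so 1.1.0 is accepted...
    assert!(caret_compatible(&v102, &v110));
    // ...while "0.1.0" behaves like ^0.1.0, which rejects 0.2.0.
    assert!(!caret_compatible(&v010, &v020));
}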
--- src/cargo/core/resolver/encode.rs | 179 +++++++++++++++++ .../core/{resolver.rs => resolver/mod.rs} | 185 +----------------- 2 files changed, 189 insertions(+), 175 deletions(-) create mode 100644 src/cargo/core/resolver/encode.rs rename src/cargo/core/{resolver.rs => resolver/mod.rs} (80%) diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs new file mode 100644 index 00000000000..2eab82fe3c1 --- /dev/null +++ b/src/cargo/core/resolver/encode.rs @@ -0,0 +1,179 @@ +use std::collections::HashMap; + +use regex::Regex; +use serialize::{Encodable, Encoder, Decodable, Decoder}; + +use core::{PackageId, SourceId}; +use util::{CargoResult, Graph}; + +use super::Resolve; + +#[deriving(Encodable, Decodable, Show)] +pub struct EncodableResolve { + package: Option>, + root: EncodableDependency +} + +impl EncodableResolve { + pub fn to_resolve(&self, default: &SourceId) -> CargoResult { + let mut g = Graph::new(); + + try!(add_pkg_to_graph(&mut g, &self.root, default)); + + match self.package { + Some(ref packages) => { + for dep in packages.iter() { + try!(add_pkg_to_graph(&mut g, dep, default)); + } + } + None => {} + } + + let root = self.root.to_package_id(default); + Ok(Resolve { graph: g, root: try!(root), features: HashMap::new() }) + } +} + +fn add_pkg_to_graph(g: &mut Graph, + dep: &EncodableDependency, + default: &SourceId) + -> CargoResult<()> +{ + let package_id = try!(dep.to_package_id(default)); + g.add(package_id.clone(), []); + + match dep.dependencies { + Some(ref deps) => { + for edge in deps.iter() { + g.link(package_id.clone(), try!(edge.to_package_id(default))); + } + }, + _ => () + }; + + Ok(()) +} + +#[deriving(Encodable, Decodable, Show, PartialOrd, Ord, PartialEq, Eq)] +pub struct EncodableDependency { + name: String, + version: String, + source: Option, + dependencies: Option> +} + +impl EncodableDependency { + fn to_package_id(&self, default_source: &SourceId) -> CargoResult { + PackageId::new( + self.name.as_slice(), + self.version.as_slice(), + self.source.as_ref().unwrap_or(default_source)) + } +} + +#[deriving(Show, PartialOrd, Ord, PartialEq, Eq)] +pub struct EncodablePackageId { + name: String, + version: String, + source: Option +} + +impl> Encodable for EncodablePackageId { + fn encode(&self, s: &mut S) -> Result<(), E> { + let mut out = format!("{} {}", self.name, self.version); + if let Some(ref s) = self.source { + out.push_str(format!(" ({})", s.to_url()).as_slice()); + } + out.encode(s) + } +} + +impl> Decodable for EncodablePackageId { + fn decode(d: &mut D) -> Result { + let string: String = raw_try!(Decodable::decode(d)); + let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap(); + let captures = regex.captures(string.as_slice()) + .expect("invalid serialized PackageId"); + + let name = captures.at(1); + let version = captures.at(2); + + let source = captures.at(3); + + let source_id = if source == "" { + None + } else { + Some(SourceId::from_url(source.to_string())) + }; + + Ok(EncodablePackageId { + name: name.to_string(), + version: version.to_string(), + source: source_id + }) + } +} + +impl EncodablePackageId { + fn to_package_id(&self, default_source: &SourceId) -> CargoResult { + PackageId::new( + self.name.as_slice(), + self.version.as_slice(), + self.source.as_ref().unwrap_or(default_source)) + } +} + +impl> Encodable for Resolve { + fn encode(&self, s: &mut S) -> Result<(), E> { + let mut ids: Vec<&PackageId> = self.graph.iter().collect(); + ids.sort(); + + let encodable = ids.iter().filter_map(|&id| 
{ + if self.root == *id { return None; } + + Some(encodable_resolve_node(id, &self.root, &self.graph)) + }).collect::>(); + + EncodableResolve { + package: Some(encodable), + root: encodable_resolve_node(&self.root, &self.root, &self.graph) + }.encode(s) + } +} + +fn encodable_resolve_node(id: &PackageId, root: &PackageId, + graph: &Graph) -> EncodableDependency { + let deps = graph.edges(id).map(|edge| { + let mut deps = edge.map(|e| { + encodable_package_id(e, root) + }).collect::>(); + deps.sort(); + deps + }); + + let source = if id.get_source_id() == root.get_source_id() { + None + } else { + Some(id.get_source_id().clone()) + }; + + EncodableDependency { + name: id.get_name().to_string(), + version: id.get_version().to_string(), + source: source, + dependencies: deps, + } +} + +fn encodable_package_id(id: &PackageId, root: &PackageId) -> EncodablePackageId { + let source = if id.get_source_id() == root.get_source_id() { + None + } else { + Some(id.get_source_id().clone()) + }; + EncodablePackageId { + name: id.get_name().to_string(), + version: id.get_version().to_string(), + source: source, + } +} diff --git a/src/cargo/core/resolver.rs b/src/cargo/core/resolver/mod.rs similarity index 80% rename from src/cargo/core/resolver.rs rename to src/cargo/core/resolver/mod.rs index 2e846e3151b..dbec2bf6b76 100644 --- a/src/cargo/core/resolver.rs +++ b/src/cargo/core/resolver/mod.rs @@ -1,17 +1,21 @@ use std::collections::hashmap::{HashMap, HashSet, Occupied, Vacant}; use std::fmt; -use regex::Regex; -use semver; -use serialize::{Encodable, Encoder, Decodable, Decoder}; - use core::{PackageId, Registry, SourceId, Summary, Dependency}; use core::PackageIdSpec; use util::{CargoResult, Graph, human, internal, ChainError}; use util::profile; use util::graph::{Nodes, Edges}; -/// Result of the `resolve` function. +pub use self::encode::{EncodableResolve, EncodableDependency, EncodablePackageId}; + +mod encode; + +/// Represents a fully resolved package dependency graph. Each node in the graph +/// is a package and edges represent dependencies between packages. +/// +/// Each instance of `Resolve` also understands the full set of features used +/// for each package as well as what the root package is. 
#[deriving(PartialEq, Eq)] pub struct Resolve { graph: Graph, @@ -26,176 +30,6 @@ pub enum ResolveMethod<'a> { /* uses_default_features = */ bool), } -#[deriving(Encodable, Decodable, Show)] -pub struct EncodableResolve { - package: Option>, - root: EncodableDependency -} - -impl EncodableResolve { - pub fn to_resolve(&self, default: &SourceId) -> CargoResult { - let mut g = Graph::new(); - - try!(add_pkg_to_graph(&mut g, &self.root, default)); - - match self.package { - Some(ref packages) => { - for dep in packages.iter() { - try!(add_pkg_to_graph(&mut g, dep, default)); - } - } - None => {} - } - - let root = self.root.to_package_id(default); - Ok(Resolve { graph: g, root: try!(root), features: HashMap::new() }) - } -} - -fn add_pkg_to_graph(g: &mut Graph, - dep: &EncodableDependency, - default: &SourceId) - -> CargoResult<()> -{ - let package_id = try!(dep.to_package_id(default)); - g.add(package_id.clone(), []); - - match dep.dependencies { - Some(ref deps) => { - for edge in deps.iter() { - g.link(package_id.clone(), try!(edge.to_package_id(default))); - } - }, - _ => () - }; - - Ok(()) -} - -#[deriving(Encodable, Decodable, Show, PartialOrd, Ord, PartialEq, Eq)] -pub struct EncodableDependency { - name: String, - version: String, - source: Option, - dependencies: Option> -} - -impl EncodableDependency { - fn to_package_id(&self, default_source: &SourceId) -> CargoResult { - PackageId::new( - self.name.as_slice(), - self.version.as_slice(), - self.source.as_ref().unwrap_or(default_source)) - } -} - -#[deriving(Show, PartialOrd, Ord, PartialEq, Eq)] -pub struct EncodablePackageId { - name: String, - version: String, - source: Option -} - -impl> Encodable for EncodablePackageId { - fn encode(&self, s: &mut S) -> Result<(), E> { - let mut out = format!("{} {}", self.name, self.version); - if let Some(ref s) = self.source { - out.push_str(format!(" ({})", s.to_url()).as_slice()); - } - out.encode(s) - } -} - -impl> Decodable for EncodablePackageId { - fn decode(d: &mut D) -> Result { - let string: String = raw_try!(Decodable::decode(d)); - let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap(); - let captures = regex.captures(string.as_slice()) - .expect("invalid serialized PackageId"); - - let name = captures.at(1); - let version = captures.at(2); - - let source = captures.at(3); - - let source_id = if source == "" { - None - } else { - Some(SourceId::from_url(source.to_string())) - }; - - Ok(EncodablePackageId { - name: name.to_string(), - version: version.to_string(), - source: source_id - }) - } -} - -impl EncodablePackageId { - fn to_package_id(&self, default_source: &SourceId) -> CargoResult { - PackageId::new( - self.name.as_slice(), - self.version.as_slice(), - self.source.as_ref().unwrap_or(default_source)) - } -} - -impl> Encodable for Resolve { - fn encode(&self, s: &mut S) -> Result<(), E> { - let mut ids: Vec<&PackageId> = self.graph.iter().collect(); - ids.sort(); - - let encodable = ids.iter().filter_map(|&id| { - if self.root == *id { return None; } - - Some(encodable_resolve_node(id, &self.root, &self.graph)) - }).collect::>(); - - EncodableResolve { - package: Some(encodable), - root: encodable_resolve_node(&self.root, &self.root, &self.graph) - }.encode(s) - } -} - -fn encodable_resolve_node(id: &PackageId, root: &PackageId, - graph: &Graph) -> EncodableDependency { - let deps = graph.edges(id).map(|edge| { - let mut deps = edge.map(|e| { - encodable_package_id(e, root) - }).collect::>(); - deps.sort(); - deps - }); - - let source = if 
id.get_source_id() == root.get_source_id() { - None - } else { - Some(id.get_source_id().clone()) - }; - - EncodableDependency { - name: id.get_name().to_string(), - version: id.get_version().to_string(), - source: source, - dependencies: deps, - } -} - -fn encodable_package_id(id: &PackageId, root: &PackageId) -> EncodablePackageId { - let source = if id.get_source_id() == root.get_source_id() { - None - } else { - Some(id.get_source_id().clone()) - }; - EncodablePackageId { - name: id.get_name().to_string(), - version: id.get_version().to_string(), - source: source, - } -} - impl Resolve { fn new(root: PackageId) -> Resolve { let mut g = Graph::new(); @@ -739,3 +573,4 @@ mod test { assert_that(&res, contains(names(["root", "foo", "bar", "baz"]))); } } + From 5851827379a4bf32caf43af3ad8505c3d18b133c Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 17 Oct 2014 08:17:17 -0700 Subject: [PATCH 3/9] Implement resolution of version requirements This commit extends the support in cargo's resolver to start resolving packages with multiple versions as well as package requirements. This is a crucial step forward when implementing the cargo registry as multiple versions will be uploaded to the registry quite quickly! This implements a fairly naive solution which should at least help cargo get out the gates initially. This implements a depth-first search of the package dependency graph with a few sorting heuristics along the way to help out resolution as it goes along. Resolution errors will likely improve over time, but this commit does make an effort to try to get some good error messages right off the bat. --- Cargo.toml | 3 + Makefile.in | 2 +- src/cargo/core/resolver/mod.rs | 489 +++++++++++++-------------------- src/cargo/core/summary.rs | 8 +- src/cargo/util/graph.rs | 6 + tests/resolve.rs | 357 ++++++++++++++++++++++++ tests/test_cargo_compile.rs | 10 +- tests/test_cargo_registry.rs | 12 +- 8 files changed, 575 insertions(+), 312 deletions(-) create mode 100644 tests/resolve.rs diff --git a/Cargo.toml b/Cargo.toml index b6e0d6e9d32..a397a6a6696 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -46,3 +46,6 @@ doc = false [[test]] name = "tests" + +[[test]] +name = "resolve" diff --git a/Makefile.in b/Makefile.in index 73aedcaf2c0..4b09b9ec7c4 100644 --- a/Makefile.in +++ b/Makefile.in @@ -89,7 +89,7 @@ style: sh tests/check-style.sh no-exes: - find $$(git ls-files | grep -v ttf) -perm +111 -type f \ + find $$(git ls-files) -perm +111 -type f \ -not -name configure -not -name '*.sh' -not -name '*.rs' | \ grep '.*' \ && exit 1 || exit 0 diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index dbec2bf6b76..f400ccf90bb 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -3,7 +3,7 @@ use std::fmt; use core::{PackageId, Registry, SourceId, Summary, Dependency}; use core::PackageIdSpec; -use util::{CargoResult, Graph, human, internal, ChainError}; +use util::{CargoResult, Graph, human, ChainError}; use util::profile; use util::graph::{Nodes, Edges}; @@ -16,7 +16,7 @@ mod encode; /// /// Each instance of `Resolve` also understands the full set of features used /// for each package as well as what the root package is.
-#[deriving(PartialEq, Eq)] +#[deriving(PartialEq, Eq, Clone)] pub struct Resolve { graph: Graph, features: HashMap>, @@ -109,133 +109,214 @@ impl fmt::Show for Resolve { } } -struct Context<'a, R:'a> { - registry: &'a mut R, +#[deriving(Clone)] +struct Context { + activations: HashMap<(String, SourceId), Vec>, resolve: Resolve, - // cycle detection visited: HashSet, - - // Try not to re-resolve too much - resolved: HashMap>, - - // Eventually, we will have smarter logic for checking for conflicts in the - // resolve, but without the registry, conflicts should not exist in - // practice, so this is just a sanity check. - seen: HashMap<(String, SourceId), semver::Version>, -} - -impl<'a, R: Registry> Context<'a, R> { - fn new(registry: &'a mut R, root: PackageId) -> Context<'a, R> { - Context { - registry: registry, - resolve: Resolve::new(root), - seen: HashMap::new(), - visited: HashSet::new(), - resolved: HashMap::new(), - } - } } /// Builds the list of all packages required to build the first argument. pub fn resolve(summary: &Summary, method: ResolveMethod, registry: &mut R) -> CargoResult { log!(5, "resolve; summary={}", summary); + + let mut cx = Context { + resolve: Resolve::new(summary.get_package_id().clone()), + activations: HashMap::new(), + visited: HashSet::new(), + }; let _p = profile::start(format!("resolving: {}", summary)); + cx.activations.insert((summary.get_name().to_string(), + summary.get_source_id().clone()), + vec![summary.clone()]); + match try!(activate(cx, registry, summary, method)) { + Ok(cx) => Ok(cx.resolve), + Err(e) => Err(e), + } +} - let mut context = Context::new(registry, summary.get_package_id().clone()); - context.seen.insert((summary.get_name().to_string(), - summary.get_source_id().clone()), - summary.get_version().clone()); - try!(resolve_deps(summary, method, &mut context)); - log!(5, " result={}", context.resolve); - Ok(context.resolve) +fn activate(mut cx: Context, + registry: &mut R, + parent: &Summary, + method: ResolveMethod) + -> CargoResult> { + // First, figure out our set of dependencies based on the requsted set of + // features. This also calculates what features we're going to enable for + // our own dependencies. + let deps = try!(resolve_features(&mut cx, parent, method)); + + // Next, transform all dependencies into a list of possible candidates which + // can satisfy that dependency. + let mut deps = try!(deps.into_iter().map(|(_dep_name, (dep, features))| { + let mut candidates = try!(registry.query(dep)); + // When we attempt versions for a package, we'll want to start at the + // maximum version and work our way down. + candidates.as_mut_slice().sort_by(|a, b| { + b.get_version().cmp(a.get_version()) + }); + Ok((dep, candidates, features)) + }).collect::>>()); + + // When we recurse, attempt to resolve dependencies with fewer candidates + // before recursing on dependencies with more candidates. This way if the + // dependency with only one candidate can't be resolved we don't have to do + // a bunch of work before we figure that out. 
+ deps.as_mut_slice().sort_by(|&(_, ref a, _), &(_, ref b, _)| { + a.len().cmp(&b.len()) + }); + + activate_deps(cx, registry, parent, deps.as_slice(), 0) } -fn resolve_deps<'a, R: Registry>(parent: &Summary, - method: ResolveMethod, - ctx: &mut Context<'a, R>) - -> CargoResult<()> { - let (deps, features) = try!(resolve_features(parent, method, ctx)); +fn activate_deps(cx: Context, + registry: &mut R, + parent: &Summary, + deps: &[(&Dependency, Vec, Vec)], + cur: uint) -> CargoResult> { + if cur == deps.len() { return Ok(Ok(cx)) } + let (dep, ref candidates, ref features) = deps[cur]; + let method = ResolveRequired(false, features.as_slice(), + dep.uses_default_features()); + + let key = (dep.get_name().to_string(), dep.get_source_id().clone()); + let prev_active = cx.activations.find(&key) + .map(|v| v.as_slice()).unwrap_or(&[]); + log!(5, "{}[{}]>{} {} candidates", parent.get_name(), cur, dep.get_name(), + candidates.len()); + log!(5, "{}[{}]>{} {} prev activations", parent.get_name(), cur, + dep.get_name(), prev_active.len()); + + // Filter the set of candidates based on the previously activated + // versions for this dependency. We can actually use a version if it + // precisely matches an activated version or if it is otherwise + // incompatible with all other activated versions. Note that we define + // "compatible" here in terms of the semver sense where if the left-most + // nonzero digit is the same they're considered compatible. + let mut my_candidates = candidates.iter().filter(|&b| { + prev_active.iter().any(|a| a == b) || + prev_active.iter().all(|a| { + !compatible(a.get_version(), b.get_version()) + }) + }); + + // Alright, for each candidate that's gotten this far, it meets the + // following requirements: + // + // 1. The version matches the dependency requirement listed for this + // package + // 2. There are no activated versions for this package which are + // semver-compatible, or there's an activated version which is + // precisely equal to `candidate`. + // + // This means that we're going to attempt to activate each candidate in + // turn. We could possibly fail to activate each candidate, so we try + // each one in turn. 
+ let mut last_err = None; + for candidate in my_candidates { + log!(5, "{}[{}]>{} trying {}", parent.get_name(), cur, dep.get_name(), + candidate.get_version()); + let mut my_cx = cx.clone(); + { + my_cx.resolve.graph.link(parent.get_package_id().clone(), + candidate.get_package_id().clone()); + let prev = match my_cx.activations.entry(key.clone()) { + Occupied(e) => e.into_mut(), + Vacant(e) => e.set(Vec::new()), + }; + if !prev.iter().any(|c| c == candidate) { + my_cx.resolve.graph.add(candidate.get_package_id().clone(), []); + prev.push(candidate.clone()); - // Recursively resolve all dependencies - for &dep in deps.iter() { - if !match ctx.resolved.entry(parent.get_package_id().clone()) { - Occupied(entry) => entry.into_mut(), - Vacant(entry) => entry.set(HashSet::new()), - }.insert(dep.get_name().to_string()) { - continue - } - - let pkgs = try!(ctx.registry.query(dep)); - if pkgs.is_empty() { - return Err(human(format!("No package named `{}` found \ - (required by `{}`).\n\ - Location searched: {}\n\ - Version required: {}", - dep.get_name(), - parent.get_package_id().get_name(), - dep.get_source_id(), - dep.get_version_req()))); - } else if pkgs.len() > 1 { - return Err(internal(format!("At the moment, Cargo only supports a \ - single source for a particular package name ({}).", dep))); - } + } - let summary = &pkgs[0]; - let name = summary.get_name().to_string(); - let source_id = summary.get_source_id().clone(); - let version = summary.get_version(); - - ctx.resolve.graph.link(parent.get_package_id().clone(), - summary.get_package_id().clone()); - - let found = match ctx.seen.find(&(name.clone(), source_id.clone())) { - Some(v) if v == version => true, - Some(..) => { - return Err(human(format!("Cargo found multiple copies of {} in \ - {}. This is not currently supported", - summary.get_name(), - summary.get_source_id()))); + // Dependency graphs are required to be a DAG. Non-transitive + // dependencies (dev-deps), however, can never introduce a cycle, so we + // skip them. + if dep.is_transitive() && + !my_cx.visited.insert(candidate.get_package_id().clone()) { + return Err(human(format!("cyclic package dependency: package `{}` \ + depends on itself", + candidate.get_package_id()))) } - None => false, + } + let mut my_cx = match try!(activate(my_cx, registry, candidate, method)) { + Ok(cx) => cx, + Err(e) => { last_err = Some(e); continue } }; - if !found { - ctx.seen.insert((name, source_id), version.clone()); - ctx.resolve.graph.add(summary.get_package_id().clone(), []); + if dep.is_transitive() { + my_cx.visited.remove(candidate.get_package_id()); } - - // Dependency graphs are required to be a DAG. Non-transitive - // dependencies (dev-deps), however, can never introduce a cycle, so we - // skip them. 
- if dep.is_transitive() && - !ctx.visited.insert(summary.get_package_id().clone()) { - return Err(human(format!("Cyclic package dependency: package `{}` \ - depends on itself", - summary.get_package_id()))) + match try!(activate_deps(my_cx, registry, parent, deps, cur + 1)) { + Ok(cx) => return Ok(Ok(cx)), + Err(e) => { last_err = Some(e); } } + } + log!(5, "{}[{}]>{} -- {}", parent.get_name(), cur, dep.get_name(), last_err); + + // Oh well, we couldn't activate any of the candidates, so we just can't + // activate this dependency at all + Ok(match last_err { + Some(e) => Err(e), + None if candidates.len() > 0 => { + let mut msg = format!("failed to select a version for `{}` \ + (required by `{}`):\n\ + all possible versions conflict with \ + previously selected versions of `{}`", + dep.get_name(), parent.get_name(), + dep.get_name()); + 'outer: for v in prev_active.iter() { + for node in cx.resolve.graph.iter() { + let mut edges = match cx.resolve.graph.edges(node) { + Some(edges) => edges, + None => continue, + }; + for edge in edges { + if edge != v.get_package_id() { continue } + + msg.push_str(format!("\n version {} in use by {}", + v.get_version(), edge).as_slice()); + continue 'outer; + } + } + msg.push_str(format!("\n version {} in use by ??", + v.get_version()).as_slice()); + } - // The list of enabled features for this dependency are both those - // listed in the dependency itself as well as any of our own features - // which enabled a feature of this package. - let features = features.find_equiv(&dep.get_name()) - .map(|v| v.as_slice()) - .unwrap_or(&[]); - try!(resolve_deps(summary, - ResolveRequired(false, features, - dep.uses_default_features()), - ctx)); - if dep.is_transitive() { - ctx.visited.remove(summary.get_package_id()); + msg.push_str(format!("\n possible versions to select: {}", + candidates.iter().map(|v| v.get_version()) + .collect::>()).as_slice()); + + Err(human(msg)) } - } + None => { + Err(human(format!("no package named `{}` found (required by `{}`)\n\ + location searched: {}\n\ + version required: {}", + dep.get_name(), parent.get_name(), + dep.get_source_id(), + dep.get_version_req()))) + } + }) +} - Ok(()) +// Returns if `a` and `b` are compatible in the semver sense. This is a +// commutative operation. +// +// Versions `a` and `b` are compatible if their left-most nonzero digit is the +// same. +fn compatible(a: &semver::Version, b: &semver::Version) -> bool { + if a.major != b.major { return false } + if a.major != 0 { return true } + if a.minor != b.minor { return false } + if a.minor != 0 { return true } + a.patch == b.patch } -fn resolve_features<'a, R>(parent: &'a Summary, method: ResolveMethod, - ctx: &mut Context) - -> CargoResult<(Vec<&'a Dependency>, - HashMap<&'a str, Vec>)> { +fn resolve_features<'a>(cx: &mut Context, parent: &'a Summary, + method: ResolveMethod) + -> CargoResult)>> { let dev_deps = match method { ResolveEverything => true, ResolveRequired(dev_deps, _, _) => dev_deps, @@ -266,7 +347,7 @@ fn resolve_features<'a, R>(parent: &'a Summary, method: ResolveMethod, feature))); } } - ret.insert(dep.get_name(), base); + ret.insert(dep.get_name(), (*dep, base)); } // All features can only point to optional dependencies, in which case they @@ -286,13 +367,13 @@ fn resolve_features<'a, R>(parent: &'a Summary, method: ResolveMethod, // Record what list of features is active for this package. 
{ let pkgid = parent.get_package_id().clone(); - match ctx.resolve.features.entry(pkgid) { + match cx.resolve.features.entry(pkgid) { Occupied(entry) => entry.into_mut(), Vacant(entry) => entry.set(HashSet::new()), }.extend(used_features.into_iter()); } - Ok((deps, ret)) + Ok(ret) } // Returns a pair of (feature dependencies, all used features) @@ -384,193 +465,3 @@ fn build_features(s: &Summary, method: ResolveMethod) Ok(()) } } - -#[cfg(test)] -mod test { - use std::collections::HashMap; - - use hamcrest::{assert_that, equal_to, contains}; - - use core::source::{SourceId, RegistryKind, GitKind}; - use core::{Dependency, PackageId, Summary, Registry}; - use util::{CargoResult, ToUrl}; - - fn resolve(pkg: PackageId, deps: Vec, - registry: &mut R) - -> CargoResult> { - let summary = Summary::new(pkg, deps, HashMap::new()).unwrap(); - let method = super::ResolveEverything; - Ok(try!(super::resolve(&summary, method, - registry)).iter().map(|p| p.clone()).collect()) - } - - trait ToDep { - fn to_dep(self) -> Dependency; - } - - impl ToDep for &'static str { - fn to_dep(self) -> Dependency { - let url = "http://example.com".to_url().unwrap(); - let source_id = SourceId::new(RegistryKind, url); - Dependency::parse(self, Some("1.0.0"), &source_id).unwrap() - } - } - - impl ToDep for Dependency { - fn to_dep(self) -> Dependency { - self - } - } - - macro_rules! pkg( - ($name:expr => $($deps:expr),+) => ({ - let d: Vec = vec!($($deps.to_dep()),+); - - Summary::new(PackageId::new($name, "1.0.0", ®istry_loc()).unwrap(), - d, HashMap::new()).unwrap() - }); - - ($name:expr) => ( - Summary::new(PackageId::new($name, "1.0.0", ®istry_loc()).unwrap(), - Vec::new(), HashMap::new()).unwrap() - ) - ) - - fn registry_loc() -> SourceId { - let remote = "http://example.com".to_url().unwrap(); - SourceId::new(RegistryKind, remote) - } - - fn pkg(name: &str) -> Summary { - Summary::new(pkg_id(name), Vec::new(), HashMap::new()).unwrap() - } - - fn pkg_id(name: &str) -> PackageId { - PackageId::new(name, "1.0.0", ®istry_loc()).unwrap() - } - - fn pkg_id_loc(name: &str, loc: &str) -> PackageId { - let remote = loc.to_url(); - let source_id = SourceId::new(GitKind("master".to_string()), - remote.unwrap()); - - PackageId::new(name, "1.0.0", &source_id).unwrap() - } - - fn pkg_loc(name: &str, loc: &str) -> Summary { - Summary::new(pkg_id_loc(name, loc), Vec::new(), HashMap::new()).unwrap() - } - - fn dep(name: &str) -> Dependency { - let url = "http://example.com".to_url().unwrap(); - let source_id = SourceId::new(RegistryKind, url); - Dependency::parse(name, Some("1.0.0"), &source_id).unwrap() - } - - fn dep_loc(name: &str, location: &str) -> Dependency { - let url = location.to_url().unwrap(); - let source_id = SourceId::new(GitKind("master".to_string()), url); - Dependency::parse(name, Some("1.0.0"), &source_id).unwrap() - } - - fn registry(pkgs: Vec) -> Vec { - pkgs - } - - fn names(names: &[&'static str]) -> Vec { - names.iter() - .map(|name| PackageId::new(*name, "1.0.0", ®istry_loc()).unwrap()) - .collect() - } - - fn loc_names(names: &[(&'static str, &'static str)]) -> Vec { - names.iter() - .map(|&(name, loc)| pkg_id_loc(name, loc)).collect() - } - - #[test] - pub fn test_resolving_empty_dependency_list() { - let res = resolve(pkg_id("root"), Vec::new(), - &mut registry(vec!())).unwrap(); - - assert_that(&res, equal_to(&names(["root"]))); - } - - #[test] - pub fn test_resolving_only_package() { - let mut reg = registry(vec!(pkg("foo"))); - let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg); - - 
assert_that(&res.unwrap(), contains(names(["root", "foo"])).exactly()); - } - - #[test] - pub fn test_resolving_one_dep() { - let mut reg = registry(vec!(pkg("foo"), pkg("bar"))); - let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg); - - assert_that(&res.unwrap(), contains(names(["root", "foo"])).exactly()); - } - - #[test] - pub fn test_resolving_multiple_deps() { - let mut reg = registry(vec!(pkg!("foo"), pkg!("bar"), pkg!("baz"))); - let res = resolve(pkg_id("root"), vec![dep("foo"), dep("baz")], - &mut reg).unwrap(); - - assert_that(&res, contains(names(["root", "foo", "baz"])).exactly()); - } - - #[test] - pub fn test_resolving_transitive_deps() { - let mut reg = registry(vec!(pkg!("foo"), pkg!("bar" => "foo"))); - let res = resolve(pkg_id("root"), vec![dep("bar")], &mut reg).unwrap(); - - assert_that(&res, contains(names(["root", "foo", "bar"]))); - } - - #[test] - pub fn test_resolving_common_transitive_deps() { - let mut reg = registry(vec!(pkg!("foo" => "bar"), pkg!("bar"))); - let res = resolve(pkg_id("root"), vec![dep("foo"), dep("bar")], - &mut reg).unwrap(); - - assert_that(&res, contains(names(["root", "foo", "bar"]))); - } - - #[test] - pub fn test_resolving_with_same_name() { - let list = vec![pkg_loc("foo", "http://first.example.com"), - pkg_loc("bar", "http://second.example.com")]; - - let mut reg = registry(list); - let res = resolve(pkg_id("root"), - vec![dep_loc("foo", "http://first.example.com"), - dep_loc("bar", "http://second.example.com")], - &mut reg); - - let mut names = loc_names([("foo", "http://first.example.com"), - ("bar", "http://second.example.com")]); - - names.push(pkg_id("root")); - - assert_that(&res.unwrap(), contains(names).exactly()); - } - - #[test] - pub fn test_resolving_with_dev_deps() { - let mut reg = registry(vec!( - pkg!("foo" => "bar", dep("baz").transitive(false)), - pkg!("baz" => "bat", dep("bam").transitive(false)), - pkg!("bar"), - pkg!("bat") - )); - - let res = resolve(pkg_id("root"), - vec![dep("foo"), dep("baz").transitive(false)], - &mut reg).unwrap(); - - assert_that(&res, contains(names(["root", "foo", "bar", "baz"]))); - } -} - diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs index b794f8db427..04abb7eb835 100644 --- a/src/cargo/core/summary.rs +++ b/src/cargo/core/summary.rs @@ -8,7 +8,7 @@ use util::{CargoResult, human}; /// Subset of a `Manifest`. Contains only the most important informations about a package. 
/// /// Summaries are cloned, and should not be mutated after creation -#[deriving(Show,Clone,PartialEq)] +#[deriving(Show,Clone)] pub struct Summary { package_id: PackageId, dependencies: Vec, @@ -91,6 +91,12 @@ impl Summary { } } +impl PartialEq for Summary { + fn eq(&self, other: &Summary) -> bool { + self.package_id == other.package_id + } +} + pub trait SummaryVec { fn names(&self) -> Vec; } diff --git a/src/cargo/util/graph.rs b/src/cargo/util/graph.rs index b83e4a8e965..5473b0b34bc 100644 --- a/src/cargo/util/graph.rs +++ b/src/cargo/util/graph.rs @@ -92,3 +92,9 @@ impl PartialEq for Graph { fn eq(&self, other: &Graph) -> bool { self.nodes.eq(&other.nodes) } } impl Eq for Graph {} + +impl Clone for Graph { + fn clone(&self) -> Graph { + Graph { nodes: self.nodes.clone() } + } +} diff --git a/tests/resolve.rs b/tests/resolve.rs new file mode 100644 index 00000000000..aa07288197d --- /dev/null +++ b/tests/resolve.rs @@ -0,0 +1,357 @@ +#![feature(macro_rules)] + +extern crate hamcrest; +extern crate cargo; + +use std::collections::HashMap; + +use hamcrest::{assert_that, equal_to, contains}; + +use cargo::core::source::{SourceId, RegistryKind, GitKind}; +use cargo::core::{Dependency, PackageId, Summary, Registry}; +use cargo::util::{CargoResult, ToUrl}; +use cargo::core::resolver::{mod, ResolveEverything}; + +fn resolve(pkg: PackageId, deps: Vec, + registry: &mut R) + -> CargoResult> { + let summary = Summary::new(pkg, deps, HashMap::new()).unwrap(); + let method = ResolveEverything; + Ok(try!(resolver::resolve(&summary, method, registry)).iter().map(|p| { + p.clone() + }).collect()) +} + +trait ToDep { + fn to_dep(self) -> Dependency; +} + +impl ToDep for &'static str { + fn to_dep(self) -> Dependency { + let url = "http://example.com".to_url().unwrap(); + let source_id = SourceId::new(RegistryKind, url); + Dependency::parse(self, Some("1.0.0"), &source_id).unwrap() + } +} + +impl ToDep for Dependency { + fn to_dep(self) -> Dependency { + self + } +} + +trait ToPkgId { + fn to_pkgid(&self) -> PackageId; +} + +impl ToPkgId for &'static str { + fn to_pkgid(&self) -> PackageId { + PackageId::new(*self, "1.0.0", ®istry_loc()).unwrap() + } +} + +impl ToPkgId for (&'static str, &'static str) { + fn to_pkgid(&self) -> PackageId { + let (name, vers) = *self; + PackageId::new(name, vers, ®istry_loc()).unwrap() + } +} + +macro_rules! 
pkg( + ($pkgid:expr => [$($deps:expr),+]) => ({ + let d: Vec = vec![$($deps.to_dep()),+]; + + Summary::new($pkgid.to_pkgid(), d, HashMap::new()).unwrap() + }); + + ($pkgid:expr) => ( + Summary::new($pkgid.to_pkgid(), Vec::new(), HashMap::new()).unwrap() + ) +) + +fn registry_loc() -> SourceId { + let remote = "http://example.com".to_url().unwrap(); + SourceId::new(RegistryKind, remote) +} + +fn pkg(name: &str) -> Summary { + Summary::new(pkg_id(name), Vec::new(), HashMap::new()).unwrap() +} + +fn pkg_id(name: &str) -> PackageId { + PackageId::new(name, "1.0.0", ®istry_loc()).unwrap() +} + +fn pkg_id_loc(name: &str, loc: &str) -> PackageId { + let remote = loc.to_url(); + let source_id = SourceId::new(GitKind("master".to_string()), + remote.unwrap()); + + PackageId::new(name, "1.0.0", &source_id).unwrap() +} + +fn pkg_loc(name: &str, loc: &str) -> Summary { + Summary::new(pkg_id_loc(name, loc), Vec::new(), HashMap::new()).unwrap() +} + +fn dep(name: &str) -> Dependency { dep_req(name, "1.0.0") } +fn dep_req(name: &str, req: &str) -> Dependency { + let url = "http://example.com".to_url().unwrap(); + let source_id = SourceId::new(RegistryKind, url); + Dependency::parse(name, Some(req), &source_id).unwrap() +} + +fn dep_loc(name: &str, location: &str) -> Dependency { + let url = location.to_url().unwrap(); + let source_id = SourceId::new(GitKind("master".to_string()), url); + Dependency::parse(name, Some("1.0.0"), &source_id).unwrap() +} + +fn registry(pkgs: Vec) -> Vec { + pkgs +} + +fn names(names: &[P]) -> Vec { + names.iter().map(|name| name.to_pkgid()).collect() +} + +fn loc_names(names: &[(&'static str, &'static str)]) -> Vec { + names.iter() + .map(|&(name, loc)| pkg_id_loc(name, loc)).collect() +} + +#[test] +fn test_resolving_empty_dependency_list() { + let res = resolve(pkg_id("root"), Vec::new(), + &mut registry(vec!())).unwrap(); + + assert_that(&res, equal_to(&names(["root"]))); +} + +#[test] +fn test_resolving_only_package() { + let mut reg = registry(vec!(pkg("foo"))); + let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg); + + assert_that(&res.unwrap(), contains(names(["root", "foo"])).exactly()); +} + +#[test] +fn test_resolving_one_dep() { + let mut reg = registry(vec!(pkg("foo"), pkg("bar"))); + let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg); + + assert_that(&res.unwrap(), contains(names(["root", "foo"])).exactly()); +} + +#[test] +fn test_resolving_multiple_deps() { + let mut reg = registry(vec!(pkg!("foo"), pkg!("bar"), pkg!("baz"))); + let res = resolve(pkg_id("root"), vec![dep("foo"), dep("baz")], + &mut reg).unwrap(); + + assert_that(&res, contains(names(["root", "foo", "baz"])).exactly()); +} + +#[test] +fn test_resolving_transitive_deps() { + let mut reg = registry(vec!(pkg!("foo"), pkg!("bar" => ["foo"]))); + let res = resolve(pkg_id("root"), vec![dep("bar")], &mut reg).unwrap(); + + assert_that(&res, contains(names(["root", "foo", "bar"]))); +} + +#[test] +fn test_resolving_common_transitive_deps() { + let mut reg = registry(vec!(pkg!("foo" => ["bar"]), pkg!("bar"))); + let res = resolve(pkg_id("root"), vec![dep("foo"), dep("bar")], + &mut reg).unwrap(); + + assert_that(&res, contains(names(["root", "foo", "bar"]))); +} + +#[test] +fn test_resolving_with_same_name() { + let list = vec![pkg_loc("foo", "http://first.example.com"), + pkg_loc("bar", "http://second.example.com")]; + + let mut reg = registry(list); + let res = resolve(pkg_id("root"), + vec![dep_loc("foo", "http://first.example.com"), + dep_loc("bar", "http://second.example.com")], 
+ &mut reg); + + let mut names = loc_names([("foo", "http://first.example.com"), + ("bar", "http://second.example.com")]); + + names.push(pkg_id("root")); + + assert_that(&res.unwrap(), contains(names).exactly()); +} + +#[test] +fn test_resolving_with_dev_deps() { + let mut reg = registry(vec!( + pkg!("foo" => ["bar", dep("baz").transitive(false)]), + pkg!("baz" => ["bat", dep("bam").transitive(false)]), + pkg!("bar"), + pkg!("bat") + )); + + let res = resolve(pkg_id("root"), + vec![dep("foo"), dep("baz").transitive(false)], + &mut reg).unwrap(); + + assert_that(&res, contains(names(["root", "foo", "bar", "baz"]))); +} + +#[test] +fn resolving_with_many_versions() { + let mut reg = registry(vec!( + pkg!(("foo", "1.0.1")), + pkg!(("foo", "1.0.2")), + )); + + let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg).unwrap(); + + assert_that(&res, contains(names([("root", "1.0.0"), + ("foo", "1.0.2")]))); +} + +#[test] +fn resolving_with_specific_version() { + let mut reg = registry(vec!( + pkg!(("foo", "1.0.1")), + pkg!(("foo", "1.0.2")), + )); + + let res = resolve(pkg_id("root"), vec![dep_req("foo", "=1.0.1")], + &mut reg).unwrap(); + + assert_that(&res, contains(names([("root", "1.0.0"), + ("foo", "1.0.1")]))); +} + +#[test] +fn resolving_incompat_versions() { + let mut reg = registry(vec!( + pkg!(("foo", "1.0.1")), + pkg!(("foo", "1.0.2")), + pkg!("bar" => [dep_req("foo", "=1.0.2")]), + )); + + assert!(resolve(pkg_id("root"), vec![ + dep_req("foo", "=1.0.1"), + dep("bar"), + ], &mut reg).is_err()); +} + +#[test] +fn resolving_backtrack() { + let mut reg = registry(vec!( + pkg!(("foo", "1.0.2") => [dep("bar")]), + pkg!(("foo", "1.0.1") => [dep("baz")]), + pkg!("bar" => [dep_req("foo", "=2.0.2")]), + pkg!("baz"), + )); + + let res = resolve(pkg_id("root"), vec![ + dep_req("foo", "^1"), + ], &mut reg).unwrap(); + + assert_that(&res, contains(names([("root", "1.0.0"), + ("foo", "1.0.1"), + ("baz", "1.0.0")]))); +} + +#[test] +fn resolving_allows_multiple_compatible_versions() { + let mut reg = registry(vec!( + pkg!(("foo", "1.0.0")), + pkg!(("foo", "2.0.0")), + pkg!(("foo", "0.1.0")), + pkg!(("foo", "0.2.0")), + + pkg!("bar" => ["d1", "d2", "d3", "d4"]), + pkg!("d1" => [dep_req("foo", "1")]), + pkg!("d2" => [dep_req("foo", "2")]), + pkg!("d3" => [dep_req("foo", "0.1")]), + pkg!("d4" => [dep_req("foo", "0.2")]), + )); + + let res = resolve(pkg_id("root"), vec![ + dep("bar"), + ], &mut reg).unwrap(); + + assert_that(&res, contains(names([("root", "1.0.0"), + ("foo", "1.0.0"), + ("foo", "2.0.0"), + ("foo", "0.1.0"), + ("foo", "0.2.0"), + ("d1", "1.0.0"), + ("d2", "1.0.0"), + ("d3", "1.0.0"), + ("d4", "1.0.0"), + ("bar", "1.0.0")]))); +} + +#[test] +fn resolving_with_deep_backtracking() { + let mut reg = registry(vec!( + pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]), + pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]), + + pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"), + dep_req("other", "1")]), + pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]), + + pkg!(("baz", "1.0.2") => [dep_req("other", "2")]), + pkg!(("baz", "1.0.1")), + + pkg!(("dep_req", "1.0.0")), + pkg!(("dep_req", "2.0.0")), + )); + + let res = resolve(pkg_id("root"), vec![ + dep_req("foo", "1"), + ], &mut reg).unwrap(); + + assert_that(&res, contains(names([("root", "1.0.0"), + ("foo", "1.0.0"), + ("bar", "2.0.0"), + ("baz", "1.0.1")]))); +} + +#[test] +fn resolving_but_no_exists() { + let mut reg = registry(vec!( + )); + + let res = resolve(pkg_id("root"), vec![ + dep_req("foo", "1"), + ], &mut reg); + 
assert!(res.is_err()); + + assert_eq!(res.to_string().as_slice(), "Err(\ +no package named `foo` found (required by `root`) +location searched: registry http://example.com/ +version required: ^1\ +)"); +} + +#[test] +fn resolving_cycle() { + let mut reg = registry(vec!( + pkg!("foo" => ["foo"]), + )); + + let res = resolve(pkg_id("root"), vec![ + dep_req("foo", "1"), + ], &mut reg); + assert!(res.is_err()); + + assert_eq!(res.to_string().as_slice(), "Err(\ +cyclic package dependency: package `foo v1.0.0 (registry http://example.com/)` \ +depends on itself\ +)"); +} diff --git a/tests/test_cargo_compile.rs b/tests/test_cargo_compile.rs index 665e91e8494..83164d13c0a 100644 --- a/tests/test_cargo_compile.rs +++ b/tests/test_cargo_compile.rs @@ -501,9 +501,9 @@ test!(cargo_compile_with_dep_name_mismatch { assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr(format!( -r#"No package named `notquitebar` found (required by `foo`). -Location searched: {proj_dir} -Version required: * +r#"no package named `notquitebar` found (required by `foo`) +location searched: {proj_dir} +version required: * "#, proj_dir = p.url()))); }) @@ -1091,8 +1091,8 @@ test!(self_dependency { "#) .file("src/test.rs", "fn main() {}"); assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout("\ -[..] test v0.0.0 ([..]) + execs().with_status(101).with_stderr("\ +cyclic package dependency: package `test v0.0.0 ([..])` depends on itself ")); }) diff --git a/tests/test_cargo_registry.rs b/tests/test_cargo_registry.rs index 944c2557219..2c598c15360 100644 --- a/tests/test_cargo_registry.rs +++ b/tests/test_cargo_registry.rs @@ -150,9 +150,9 @@ test!(nonexistent { assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr("\ -No package named `nonexistent` found (required by `foo`). -Location searched: the package registry -Version required: >= 0.0.0 +no package named `nonexistent` found (required by `foo`) +location searched: the package registry +version required: >= 0.0.0 ")); }) @@ -196,9 +196,9 @@ test!(update_registry { assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr("\ -No package named `notyet` found (required by `foo`). -Location searched: the package registry -Version required: >= 0.0.0 +no package named `notyet` found (required by `foo`) +location searched: the package registry +version required: >= 0.0.0 ")); // Add the package and commit From 08c7d6133df64d97dc7b4433e7afc7c7b8f70af5 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 17 Oct 2014 11:47:33 -0700 Subject: [PATCH 4/9] Make PackageId cheap to clone These are cloned a massive number of times during resolution, so let's make them much cheaper to clone with an Arc. This uses Arc instead of Rc because the fiddly bits in job_queue have a PackageId cross task boundaries for parallelism. --- src/cargo/core/package_id.rs | 65 +++++++++++++++++++++++++----------- 1 file changed, 46 insertions(+), 19 deletions(-) diff --git a/src/cargo/core/package_id.rs b/src/cargo/core/package_id.rs index 855272e96ea..1442a781563 100644 --- a/src/cargo/core/package_id.rs +++ b/src/cargo/core/package_id.rs @@ -1,5 +1,6 @@ use semver; use std::hash::Hash; +use std::sync::Arc; use std::fmt::{mod, Show, Formatter}; use std::hash; use serialize::{Encodable, Encoder, Decodable, Decoder}; @@ -10,8 +11,13 @@ use util::{CargoResult, CargoError, short_hash, ToSemver}; use core::source::SourceId; /// Identifier for a specific version of a package in a specific source. 
-#[deriving(Clone, PartialEq, PartialOrd, Ord)] +#[deriving(Clone)] pub struct PackageId { + inner: Arc, +} + +#[deriving(PartialEq, PartialOrd, Eq, Ord)] +struct PackageIdInner { name: String, version: semver::Version, source_id: SourceId, @@ -19,8 +25,9 @@ pub struct PackageId { impl> Encodable for PackageId { fn encode(&self, s: &mut S) -> Result<(), E> { - let source = self.source_id.to_url(); - let encoded = format!("{} {} ({})", self.name, self.version, source); + let source = self.inner.source_id.to_url(); + let encoded = format!("{} {} ({})", self.inner.name, self.inner.version, + source); encoded.encode(s) } } @@ -36,22 +43,39 @@ impl> Decodable for PackageId { let source_id = SourceId::from_url(captures.at(3).to_string()); Ok(PackageId { - name: name.to_string(), - version: version, - source_id: source_id + inner: Arc::new(PackageIdInner { + name: name.to_string(), + version: version, + source_id: source_id, + }), }) } } impl Hash for PackageId { fn hash(&self, state: &mut S) { - self.name.hash(state); - self.version.to_string().hash(state); - self.source_id.hash(state); + self.inner.name.hash(state); + self.inner.version.to_string().hash(state); + self.inner.source_id.hash(state); } } +impl PartialEq for PackageId { + fn eq(&self, other: &PackageId) -> bool { + self.inner.eq(&*other.inner) + } +} +impl PartialOrd for PackageId { + fn partial_cmp(&self, other: &PackageId) -> Option { + self.inner.partial_cmp(&*other.inner) + } +} impl Eq for PackageId {} +impl Ord for PackageId { + fn cmp(&self, other: &PackageId) -> Ordering { + self.inner.cmp(&*other.inner) + } +} #[deriving(Clone, Show, PartialEq)] pub enum PackageIdError { @@ -80,27 +104,30 @@ impl PackageId { sid: &SourceId) -> CargoResult { let v = try!(version.to_semver().map_err(InvalidVersion)); Ok(PackageId { - name: name.to_string(), - version: v, - source_id: sid.clone() + inner: Arc::new(PackageIdInner { + name: name.to_string(), + version: v, + source_id: sid.clone(), + }), }) } pub fn get_name(&self) -> &str { - self.name.as_slice() + self.inner.name.as_slice() } pub fn get_version(&self) -> &semver::Version { - &self.version + &self.inner.version } pub fn get_source_id(&self) -> &SourceId { - &self.source_id + &self.inner.source_id } pub fn generate_metadata(&self) -> Metadata { let metadata = short_hash( - &(self.name.as_slice(), self.version.to_string(), &self.source_id)); + &(self.inner.name.as_slice(), self.inner.version.to_string(), + &self.inner.source_id)); let extra_filename = format!("-{}", metadata); Metadata { metadata: metadata, extra_filename: extra_filename } @@ -119,10 +146,10 @@ static CENTRAL_REPO: &'static str = "http://rust-lang.org/central-repo"; impl Show for PackageId { fn fmt(&self, f: &mut Formatter) -> fmt::Result { - try!(write!(f, "{} v{}", self.name, self.version)); + try!(write!(f, "{} v{}", self.inner.name, self.inner.version)); - if self.source_id.to_string().as_slice() != CENTRAL_REPO { - try!(write!(f, " ({})", self.source_id)); + if self.inner.source_id.to_string().as_slice() != CENTRAL_REPO { + try!(write!(f, " ({})", self.inner.source_id)); } Ok(()) From 913c243fd467e29145656f9fcdd83611511bfb96 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 17 Oct 2014 11:48:57 -0700 Subject: [PATCH 5/9] Make SourceId cheap to clone Like PackageId, these are cloned quite often, so this moves them into an Arc to make them cheap to clone. This also removes the public fields in favor of accessors. 
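Patches 4 and 5 apply the same cheap-clone pattern to `PackageId` and `SourceId`: the fields move into a private `*Inner` struct behind an `Arc`, `clone()` becomes a reference-count bump, and equality and ordering are forwarded to the inner data. A condensed sketch of the shape, in current Rust syntax; the `version` and `source_url` fields here are simplified stand-ins for the real `semver::Version` and `SourceId` types, and only the `PackageIdInner` name comes from the actual diff.

use std::sync::Arc;

// All data lives in an immutable inner struct; the public type is just an
// Arc handle to it, so cloning is an atomic increment.
#[derive(PartialEq, Eq)]
struct PackageIdInner {
    name: String,
    version: String,    // semver::Version in the real code
    source_url: String, // SourceId in the real code
}

#[derive(Clone)]
pub struct PackageId {
    inner: Arc<PackageIdInner>,
}

impl PartialEq for PackageId {
    fn eq(&self, other: &PackageId) -> bool {
        *self.inner == *other.inner // compare contents, not pointer identity
    }
}
impl Eq for PackageId {}

impl PackageId {
    pub fn name(&self) -> &str { &self.inner.name }
}

fn main() {
    let a = PackageId {
        inner: Arc::new(PackageIdInner {
            name: "cargo".to_string(),
            version: "0.0.1".to_string(),
            source_url: "https://github.com/rust-lang/cargo".to_string(),
        }),
    };
    let b = a.clone(); // bumps a reference count; no strings are copied
    assert!(a == b && a.name() == b.name());
}

The backtracking resolver introduced in patch 3 clones these IDs constantly while exploring candidates, so an O(1) `clone()` matters far more than the extra pointer indirection.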
--- src/cargo/core/package_id_spec.rs | 4 +- src/cargo/core/source.rs | 235 +++++++++++++---------- src/cargo/ops/cargo_rustc/fingerprint.rs | 2 +- src/cargo/sources/git/source.rs | 10 +- 4 files changed, 140 insertions(+), 111 deletions(-) diff --git a/src/cargo/core/package_id_spec.rs b/src/cargo/core/package_id_spec.rs index bfce8cd1e1d..7ce03616d90 100644 --- a/src/cargo/core/package_id_spec.rs +++ b/src/cargo/core/package_id_spec.rs @@ -49,7 +49,7 @@ impl PackageIdSpec { PackageIdSpec { name: package_id.get_name().to_string(), version: Some(package_id.get_version().clone()), - url: Some(package_id.get_source_id().url.clone()), + url: Some(package_id.get_source_id().get_url().clone()), } } @@ -110,7 +110,7 @@ impl PackageIdSpec { } match self.url { - Some(ref u) => *u == package_id.get_source_id().url, + Some(ref u) => u == package_id.get_source_id().get_url(), None => true } } diff --git a/src/cargo/core/source.rs b/src/cargo/core/source.rs index 6b029b7008b..1e6df19d506 100644 --- a/src/cargo/core/source.rs +++ b/src/cargo/core/source.rs @@ -5,6 +5,7 @@ use std::fmt::{mod, Show, Formatter}; use std::hash; use std::iter; use std::mem; +use std::sync::Arc; use serialize::{Decodable, Decoder, Encodable, Encoder}; use url::Url; @@ -59,104 +60,26 @@ type Error = Box; /// Unique identifier for a source of packages. #[deriving(Clone, Eq)] pub struct SourceId { - pub url: Url, - pub kind: SourceKind, - // e.g. the exact git revision of the specified branch for a Git Source - pub precise: Option -} - -impl PartialOrd for SourceId { - fn partial_cmp(&self, other: &SourceId) -> Option { - self.to_string().partial_cmp(&other.to_string()) - } -} - -impl Ord for SourceId { - fn cmp(&self, other: &SourceId) -> Ordering { - self.to_string().cmp(&other.to_string()) - } -} - -impl> Encodable for SourceId { - fn encode(&self, s: &mut S) -> Result<(), E> { - if self.is_path() { - s.emit_option_none() - } else { - self.to_url().encode(s) - } - } -} - -impl> Decodable for SourceId { - fn decode(d: &mut D) -> Result { - let string: String = Decodable::decode(d).ok().expect("Invalid encoded SourceId"); - Ok(SourceId::from_url(string)) - } -} - -impl Show for SourceId { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match *self { - SourceId { kind: PathKind, ref url, .. } => url.fmt(f), - SourceId { kind: GitKind(ref reference), ref url, ref precise, .. } => { - try!(write!(f, "{}", url)); - if reference.as_slice() != "master" { - try!(write!(f, "?ref={}", reference)); - } - - match *precise { - Some(ref s) => { - try!(write!(f, "#{}", s.as_slice().slice_to(8))); - } - None => {} - } - Ok(()) - }, - SourceId { kind: RegistryKind, ref url, .. } => { - let default = RegistrySource::url().ok(); - if default.as_ref() == Some(url) { - write!(f, "the package registry") - } else { - write!(f, "registry {}", url) - } - } - } - } + inner: Arc, } -// This custom implementation handles situations such as when two git sources -// point at *almost* the same URL, but not quite, even when they actually point -// to the same repository. 
-impl PartialEq for SourceId { - fn eq(&self, other: &SourceId) -> bool { - if self.kind != other.kind { return false } - if self.url == other.url { return true } - - match (&self.kind, &other.kind, &self.url, &other.url) { - (&GitKind(ref ref1), &GitKind(ref ref2), u1, u2) => { - ref1 == ref2 && - git::canonicalize_url(u1) == git::canonicalize_url(u2) - } - _ => false, - } - } -} - -impl hash::Hash for SourceId { - fn hash(&self, into: &mut S) { - self.kind.hash(into); - match *self { - SourceId { kind: GitKind(..), ref url, .. } => { - git::canonicalize_url(url).hash(into) - } - _ => self.url.hash(into), - } - } +#[deriving(Eq, Clone)] +struct SourceIdInner { + url: Url, + kind: SourceKind, + // e.g. the exact git revision of the specified branch for a Git Source + precise: Option } impl SourceId { pub fn new(kind: SourceKind, url: Url) -> SourceId { - SourceId { kind: kind, url: url, precise: None } + SourceId { + inner: Arc::new(SourceIdInner { + kind: kind, + url: url, + precise: None, + }), + } } /// Parses a source URL and returns the corresponding ID. @@ -198,12 +121,12 @@ impl SourceId { } pub fn to_url(&self) -> String { - match *self { - SourceId { kind: PathKind, .. } => { + match *self.inner { + SourceIdInner { kind: PathKind, .. } => { fail!("Path sources are not included in the lockfile, \ so this is unimplemented") }, - SourceId { + SourceIdInner { kind: GitKind(ref reference), ref url, ref precise, .. } => { let ref_str = if reference.as_slice() != "master" { @@ -220,7 +143,7 @@ impl SourceId { format!("git+{}{}{}", url, ref_str, precise_str) }, - SourceId { kind: RegistryKind, .. } => { + SourceIdInner { kind: RegistryKind, .. } => { // TODO: Central registry vs. alternates "registry+https://crates.io/".to_string() } @@ -255,15 +178,19 @@ impl SourceId { } pub fn get_url(&self) -> &Url { - &self.url + &self.inner.url + } + + pub fn get_kind(&self) -> &SourceKind { + &self.inner.kind } pub fn is_path(&self) -> bool { - self.kind == PathKind + self.inner.kind == PathKind } pub fn is_git(&self) -> bool { - match self.kind { + match self.inner.kind { GitKind(_) => true, _ => false } @@ -272,10 +199,10 @@ impl SourceId { /// Creates an implementation of `Source` corresponding to this ID. pub fn load<'a>(&self, config: &'a mut Config) -> Box { log!(5, "loading SourceId; {}", self); - match self.kind { + match self.inner.kind { GitKind(..) => box GitSource::new(self, config) as Box, PathKind => { - let path = match self.url.to_file_path() { + let path = match self.inner.url.to_file_path() { Ok(p) => p, Err(()) => fail!("path sources cannot be remote"), }; @@ -285,10 +212,112 @@ impl SourceId { } } + pub fn get_precise(&self) -> Option<&str> { + self.inner.precise.as_ref().map(|s| s.as_slice()) + } + pub fn with_precise(&self, v: String) -> SourceId { SourceId { - precise: Some(v), - .. self.clone() + inner: Arc::new(SourceIdInner { + precise: Some(v), + .. 
(*self.inner).clone() + }), + } + } +} + +impl PartialEq for SourceId { + fn eq(&self, other: &SourceId) -> bool { + self.inner.eq(&*other.inner) + } +} + +impl PartialOrd for SourceId { + fn partial_cmp(&self, other: &SourceId) -> Option { + self.to_string().partial_cmp(&other.to_string()) + } +} + +impl Ord for SourceId { + fn cmp(&self, other: &SourceId) -> Ordering { + self.to_string().cmp(&other.to_string()) + } +} + +impl> Encodable for SourceId { + fn encode(&self, s: &mut S) -> Result<(), E> { + if self.is_path() { + s.emit_option_none() + } else { + self.to_url().encode(s) + } + } +} + +impl> Decodable for SourceId { + fn decode(d: &mut D) -> Result { + let string: String = Decodable::decode(d).ok().expect("Invalid encoded SourceId"); + Ok(SourceId::from_url(string)) + } +} + +impl Show for SourceId { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + match *self.inner { + SourceIdInner { kind: PathKind, ref url, .. } => url.fmt(f), + SourceIdInner { kind: GitKind(ref reference), ref url, + ref precise, .. } => { + try!(write!(f, "{}", url)); + if reference.as_slice() != "master" { + try!(write!(f, "?ref={}", reference)); + } + + match *precise { + Some(ref s) => { + try!(write!(f, "#{}", s.as_slice().slice_to(8))); + } + None => {} + } + Ok(()) + }, + SourceIdInner { kind: RegistryKind, ref url, .. } => { + let default = RegistrySource::url().ok(); + if default.as_ref() == Some(url) { + write!(f, "the package registry") + } else { + write!(f, "registry {}", url) + } + } + } + } +} + +// This custom implementation handles situations such as when two git sources +// point at *almost* the same URL, but not quite, even when they actually point +// to the same repository. +impl PartialEq for SourceIdInner { + fn eq(&self, other: &SourceIdInner) -> bool { + if self.kind != other.kind { return false } + if self.url == other.url { return true } + + match (&self.kind, &other.kind, &self.url, &other.url) { + (&GitKind(ref ref1), &GitKind(ref ref2), u1, u2) => { + ref1 == ref2 && + git::canonicalize_url(u1) == git::canonicalize_url(u2) + } + _ => false, + } + } +} + +impl hash::Hash for SourceId { + fn hash(&self, into: &mut S) { + self.inner.kind.hash(into); + match *self.inner { + SourceIdInner { kind: GitKind(..), ref url, .. 
} => { + git::canonicalize_url(url).hash(into) + } + _ => self.inner.url.hash(into), } } } diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs index 3f6f4e6a316..4027d3a2fbf 100644 --- a/src/cargo/ops/cargo_rustc/fingerprint.rs +++ b/src/cargo/ops/cargo_rustc/fingerprint.rs @@ -54,7 +54,7 @@ pub fn prepare_target(cx: &mut Context, pkg: &Package, target: &Target, // constant (which is the responsibility of the source) let use_pkg = { let doc = target.get_profile().is_doc(); - let path = match pkg.get_summary().get_source_id().kind { + let path = match *pkg.get_summary().get_source_id().get_kind() { PathKind => true, _ => false, }; diff --git a/src/cargo/sources/git/source.rs b/src/cargo/sources/git/source.rs index b68a6e9ef83..8938bac5599 100644 --- a/src/cargo/sources/git/source.rs +++ b/src/cargo/sources/git/source.rs @@ -28,7 +28,7 @@ impl<'a, 'b> GitSource<'a, 'b> { config: &'a mut Config<'b>) -> GitSource<'a, 'b> { assert!(source_id.is_git(), "id is not git, id={}", source_id); - let reference = match source_id.kind { + let reference = match *source_id.get_kind() { GitKind(ref reference) => reference, _ => fail!("Not a git source; id={}", source_id) }; @@ -42,9 +42,9 @@ impl<'a, 'b> GitSource<'a, 'b> { let checkout_path = config.git_checkout_path() .join(ident.as_slice()).join(reference.as_slice()); - let reference = match source_id.precise { - Some(ref s) => s, - None => reference, + let reference = match source_id.get_precise() { + Some(s) => s, + None => reference.as_slice(), }; GitSource { @@ -160,7 +160,7 @@ impl<'a, 'b> Source for GitSource<'a, 'b> { let actual_rev = self.remote.rev_for(&self.db_path, self.reference.as_slice()); let should_update = actual_rev.is_err() || - self.source_id.precise.is_none(); + self.source_id.get_precise().is_none(); let (repo, actual_rev) = if should_update { try!(self.config.shell().status("Updating", From 98f0dab3cdb51e27a4b56020b884d5552ecd740f Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 17 Oct 2014 11:50:18 -0700 Subject: [PATCH 6/9] Continue reducing clone costs Share the `visited` hash set among all contexts, just be sure to maintain it across failures. Also put all Summary structures into an `Rc` as they're cloned quite frequently. --- src/cargo/core/resolver/mod.rs | 58 ++++++++++++++++++---------------- 1 file changed, 31 insertions(+), 27 deletions(-) diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index f400ccf90bb..34458472860 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -1,5 +1,8 @@ +use std::cell::RefCell; use std::collections::hashmap::{HashMap, HashSet, Occupied, Vacant}; use std::fmt; +use std::rc::Rc; +use semver; use core::{PackageId, Registry, SourceId, Summary, Dependency}; use core::PackageIdSpec; @@ -111,9 +114,9 @@ impl fmt::Show for Resolve { #[deriving(Clone)] struct Context { - activations: HashMap<(String, SourceId), Vec>, + activations: HashMap<(String, SourceId), Vec>>, resolve: Resolve, - visited: HashSet, + visited: Rc>>, } /// Builds the list of all packages required to build the first argument. 
@@ -124,12 +127,12 @@ pub fn resolve(summary: &Summary, method: ResolveMethod, let mut cx = Context { resolve: Resolve::new(summary.get_package_id().clone()), activations: HashMap::new(), - visited: HashSet::new(), + visited: Rc::new(RefCell::new(HashSet::new())), }; let _p = profile::start(format!("resolving: {}", summary)); cx.activations.insert((summary.get_name().to_string(), summary.get_source_id().clone()), - vec![summary.clone()]); + vec![Rc::new(summary.clone())]); match try!(activate(cx, registry, summary, method)) { Ok(cx) => Ok(cx.resolve), Err(e) => Err(e), @@ -155,6 +158,7 @@ fn activate(mut cx: Context, candidates.as_mut_slice().sort_by(|a, b| { b.get_version().cmp(a.get_version()) }); + let candidates = candidates.into_iter().map(Rc::new).collect::>(); Ok((dep, candidates, features)) }).collect::>>()); @@ -172,7 +176,7 @@ fn activate(mut cx: Context, fn activate_deps(cx: Context, registry: &mut R, parent: &Summary, - deps: &[(&Dependency, Vec, Vec)], + deps: &[(&Dependency, Vec>, Vec)], cur: uint) -> CargoResult> { if cur == deps.len() { return Ok(Ok(cx)) } let (dep, ref candidates, ref features) = deps[cur]; @@ -227,26 +231,26 @@ fn activate_deps(cx: Context, if !prev.iter().any(|c| c == candidate) { my_cx.resolve.graph.add(candidate.get_package_id().clone(), []); prev.push(candidate.clone()); - } + } - // Dependency graphs are required to be a DAG. Non-transitive - // dependencies (dev-deps), however, can never introduce a cycle, so we - // skip them. - if dep.is_transitive() && - !my_cx.visited.insert(candidate.get_package_id().clone()) { - return Err(human(format!("cyclic package dependency: package `{}` \ - depends on itself", - candidate.get_package_id()))) - } + // Dependency graphs are required to be a DAG. Non-transitive + // dependencies (dev-deps), however, can never introduce a cycle, so we + // skip them. 
+ if dep.is_transitive() && + !cx.visited.borrow_mut().insert(candidate.get_package_id().clone()) { + return Err(human(format!("cyclic package dependency: package `{}` \ + depends on itself", + candidate.get_package_id()))) + } + let my_cx = try!(activate(my_cx, registry, &**candidate, method)); + if dep.is_transitive() { + cx.visited.borrow_mut().remove(candidate.get_package_id()); } - let mut my_cx = match try!(activate(my_cx, registry, candidate, method)) { + let my_cx = match my_cx { Ok(cx) => cx, Err(e) => { last_err = Some(e); continue } }; - if dep.is_transitive() { - my_cx.visited.remove(candidate.get_package_id()); - } match try!(activate_deps(my_cx, registry, parent, deps, cur + 1)) { Ok(cx) => return Ok(Ok(cx)), Err(e) => { last_err = Some(e); } @@ -324,18 +328,18 @@ fn resolve_features<'a>(cx: &mut Context, parent: &'a Summary, // First, filter by dev-dependencies let deps = parent.get_dependencies(); - let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps); + let mut deps = deps.iter().filter(|d| d.is_transitive() || dev_deps); - // Second, weed out optional dependencies, but not those required let (mut feature_deps, used_features) = try!(build_features(parent, method)); let mut ret = HashMap::new(); - let deps = deps.filter(|d| { - !d.is_optional() || feature_deps.contains_key_equiv(&d.get_name()) - }).collect::>(); // Next, sanitize all requested features by whitelisting all the requested // features that correspond to optional dependencies - for dep in deps.iter() { + for dep in deps { + // weed out optional dependencies, but not those required + if dep.is_optional() && !feature_deps.contains_key_equiv(&dep.get_name()) { + continue + } let mut base = feature_deps.pop_equiv(&dep.get_name()) .unwrap_or(Vec::new()); for feature in dep.get_features().iter() { @@ -347,7 +351,7 @@ fn resolve_features<'a>(cx: &mut Context, parent: &'a Summary, feature))); } } - ret.insert(dep.get_name(), (*dep, base)); + ret.insert(dep.get_name(), (dep, base)); } // All features can only point to optional dependencies, in which case they @@ -365,7 +369,7 @@ fn resolve_features<'a>(cx: &mut Context, parent: &'a Summary, } // Record what list of features is active for this package. - { + if used_features.len() > 0 { let pkgid = parent.get_package_id().clone(); match cx.resolve.features.entry(pkgid) { Occupied(entry) => entry.into_mut(), From 82e469ef221e5a5f57bbe99d750e9c1f41f931f3 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 17 Oct 2014 12:22:52 -0700 Subject: [PATCH 7/9] source changes --- src/cargo/core/source.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/cargo/core/source.rs b/src/cargo/core/source.rs index 1e6df19d506..3740ca3eaf2 100644 --- a/src/cargo/core/source.rs +++ b/src/cargo/core/source.rs @@ -449,11 +449,11 @@ mod tests { let s1 = SourceId::new(GitKind("master".to_string()), loc); let loc = "git://github.com/foo/bar".to_url().unwrap(); - let mut s2 = SourceId::new(GitKind("master".to_string()), loc); + let s2 = SourceId::new(GitKind("master".to_string()), loc.clone()); assert_eq!(s1, s2); - s2.kind = GitKind("foo".to_string()); - assert!(s1 != s2); + let s3 = SourceId::new(GitKind("foo".to_string()), loc); + assert!(s1 != s3); } } From 7db89eded7453e9b8814ebe4b03b1d7bcf09d1b9 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 17 Oct 2014 12:23:10 -0700 Subject: [PATCH 8/9] Avoid visiting deps too often in resolve If we're activating an already-active version of a dependency, then there's no need to actually recurse into it. 
Instead, we can skip it if we're not enabling any extra features in it to avoid extraneous recursion. --- src/cargo/core/resolver/mod.rs | 50 +++++++++++++++++++++------------- tests/resolve.rs | 8 +----- tests/test_cargo_compile.rs | 4 +-- 3 files changed, 33 insertions(+), 29 deletions(-) diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 34458472860..8909d1d82bd 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -221,35 +221,47 @@ fn activate_deps(cx: Context, log!(5, "{}[{}]>{} trying {}", parent.get_name(), cur, dep.get_name(), candidate.get_version()); let mut my_cx = cx.clone(); - { + let early_return = { my_cx.resolve.graph.link(parent.get_package_id().clone(), candidate.get_package_id().clone()); let prev = match my_cx.activations.entry(key.clone()) { Occupied(e) => e.into_mut(), Vacant(e) => e.set(Vec::new()), }; - if !prev.iter().any(|c| c == candidate) { + if prev.iter().any(|c| c == candidate) { + match cx.resolve.features(candidate.get_package_id()) { + Some(prev_features) => { + features.iter().all(|f| prev_features.contains(f)) + } + None => features.len() == 0, + } + } else { my_cx.resolve.graph.add(candidate.get_package_id().clone(), []); prev.push(candidate.clone()); + false } - } + }; - // Dependency graphs are required to be a DAG. Non-transitive - // dependencies (dev-deps), however, can never introduce a cycle, so we - // skip them. - if dep.is_transitive() && - !cx.visited.borrow_mut().insert(candidate.get_package_id().clone()) { - return Err(human(format!("cyclic package dependency: package `{}` \ - depends on itself", - candidate.get_package_id()))) - } - let my_cx = try!(activate(my_cx, registry, &**candidate, method)); - if dep.is_transitive() { - cx.visited.borrow_mut().remove(candidate.get_package_id()); - } - let my_cx = match my_cx { - Ok(cx) => cx, - Err(e) => { last_err = Some(e); continue } + let my_cx = if early_return { + my_cx + } else { + // Dependency graphs are required to be a DAG. Non-transitive + // dependencies (dev-deps), however, can never introduce a cycle, so we + // skip them. 
+ if dep.is_transitive() && + !cx.visited.borrow_mut().insert(candidate.get_package_id().clone()) { + return Err(human(format!("cyclic package dependency: package `{}` \ + depends on itself", + candidate.get_package_id()))) + } + let my_cx = try!(activate(my_cx, registry, &**candidate, method)); + if dep.is_transitive() { + cx.visited.borrow_mut().remove(candidate.get_package_id()); + } + match my_cx { + Ok(cx) => cx, + Err(e) => { last_err = Some(e); continue } + } }; match try!(activate_deps(my_cx, registry, parent, deps, cur + 1)) { Ok(cx) => return Ok(Ok(cx)), diff --git a/tests/resolve.rs b/tests/resolve.rs index aa07288197d..30c8c04fca8 100644 --- a/tests/resolve.rs +++ b/tests/resolve.rs @@ -345,13 +345,7 @@ fn resolving_cycle() { pkg!("foo" => ["foo"]), )); - let res = resolve(pkg_id("root"), vec![ + let _ = resolve(pkg_id("root"), vec![ dep_req("foo", "1"), ], &mut reg); - assert!(res.is_err()); - - assert_eq!(res.to_string().as_slice(), "Err(\ -cyclic package dependency: package `foo v1.0.0 (registry http://example.com/)` \ -depends on itself\ -)"); } diff --git a/tests/test_cargo_compile.rs b/tests/test_cargo_compile.rs index 83164d13c0a..d55b2e0afb9 100644 --- a/tests/test_cargo_compile.rs +++ b/tests/test_cargo_compile.rs @@ -1091,9 +1091,7 @@ test!(self_dependency { "#) .file("src/test.rs", "fn main() {}"); assert_that(p.cargo_process("build"), - execs().with_status(101).with_stderr("\ -cyclic package dependency: package `test v0.0.0 ([..])` depends on itself -")); + execs().with_status(0)); }) #[cfg(not(windows))] From ba17daeac8fa2645ce72d9e6fcfee987bd39c138 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Wed, 22 Oct 2014 12:12:06 -0700 Subject: [PATCH 9/9] Make to_url public for rustc --- src/cargo/util/mod.rs | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/cargo/util/mod.rs b/src/cargo/util/mod.rs index 6e14c33856d..72fd9516fdd 100644 --- a/src/cargo/util/mod.rs +++ b/src/cargo/util/mod.rs @@ -15,19 +15,19 @@ pub use self::to_semver::ToSemver; pub use self::vcs::{GitRepo, HgRepo}; pub use self::sha256::Sha256; -pub mod graph; -pub mod process_builder; pub mod config; -pub mod important_paths; -pub mod result; -pub mod toml; -pub mod paths; pub mod errors; +pub mod graph; pub mod hex; +pub mod important_paths; +pub mod paths; +pub mod process_builder; pub mod profile; +pub mod result; pub mod to_semver; -mod pool; +pub mod to_url; +pub mod toml; mod dependency_queue; -mod to_url; -mod vcs; +mod pool; mod sha256; +mod vcs;
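
The resolver changes in patches 6 and 8 come down to two ideas: keep the cycle-detection set behind `Rc<RefCell<...>>` so that cloning a resolution context shares one set instead of copying it, and skip re-activating a package that is already active unless the new dependency edge asks for features that are not yet enabled. The sketch below illustrates those two ideas in current Rust; it is not the patched Cargo code itself. `PkgId`, `Context::already_covered`, and `Context::visit` are made-up stand-ins for Cargo's `PackageId`/`Summary` machinery, and the actual patches are written against pre-1.0 Rust syntax.

use std::cell::RefCell;
use std::collections::{HashMap, HashSet};
use std::rc::Rc;

// Hypothetical stand-in for Cargo's PackageId.
type PkgId = String;

#[derive(Clone)]
struct Context {
    // Cloning a Context copies only the Rc handle, so every speculative
    // clone shares the same "currently on the DFS path" set (patch 6).
    visited: Rc<RefCell<HashSet<PkgId>>>,
    // Features already enabled for each activated package.
    features: HashMap<PkgId, HashSet<String>>,
}

impl Context {
    // Patch 8's early return: an already-active package only needs to be
    // revisited if the new edge asks for a feature not yet enabled.
    fn already_covered(&self, pkg: &PkgId, wanted: &[String]) -> bool {
        match self.features.get(pkg) {
            Some(enabled) => wanted.iter().all(|f| enabled.contains(f)),
            None => wanted.is_empty(),
        }
    }

    // Patch 6's cycle check: insert on the way down, remove on the way back
    // up, so the shared set stays correct across backtracking and failures.
    fn visit<F>(&self, pkg: &PkgId, recurse: F) -> Result<(), String>
    where
        F: FnOnce(&Context) -> Result<(), String>,
    {
        if !self.visited.borrow_mut().insert(pkg.clone()) {
            return Err(format!(
                "cyclic package dependency: package `{}` depends on itself",
                pkg
            ));
        }
        let result = recurse(self);
        self.visited.borrow_mut().remove(pkg);
        result
    }
}

fn main() {
    let mut cx = Context {
        visited: Rc::new(RefCell::new(HashSet::new())),
        features: HashMap::new(),
    };
    cx.features
        .insert("log".to_string(), ["color".to_string()].into_iter().collect());

    // Already activated with the requested feature set: no need to recurse.
    assert!(cx.already_covered(&"log".to_string(), &["color".to_string()]));
    // A new feature is requested, so the package has to be visited again.
    assert!(!cx.already_covered(&"log".to_string(), &["timestamps".to_string()]));

    // A self-cycle is caught through the shared visited set.
    let err = cx.visit(&"foo".to_string(), |cx| {
        cx.visit(&"foo".to_string(), |_| Ok(()))
    });
    assert!(err.is_err());

    // Backtracking removed `foo` again, so a clone of the context (which
    // shares the same set) observes it as empty.
    assert!(cx.clone().visited.borrow().is_empty());
}

Sharing the set this way keeps speculative clones of the context cheap, but it also means the set has to be restored on every exit path, which is why patch 6 removes the package id again once the recursion returns, whether or not the candidate worked out.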