summaryrefslogtreecommitdiffstats
path: root/crates/resolver-tests
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-04 12:47:55 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-04 12:47:55 +0000
commit2aadc03ef15cb5ca5cc2af8a7c08e070742f0ac4 (patch)
tree033cc839730fda84ff08db877037977be94e5e3a /crates/resolver-tests
parentInitial commit. (diff)
downloadcargo-2aadc03ef15cb5ca5cc2af8a7c08e070742f0ac4.tar.xz
cargo-2aadc03ef15cb5ca5cc2af8a7c08e070742f0ac4.zip
Adding upstream version 0.70.1+ds1.upstream/0.70.1+ds1upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'crates/resolver-tests')
-rw-r--r--crates/resolver-tests/Cargo.toml12
-rw-r--r--crates/resolver-tests/src/lib.rs991
-rw-r--r--crates/resolver-tests/tests/resolve.rs1504
3 files changed, 2507 insertions, 0 deletions
diff --git a/crates/resolver-tests/Cargo.toml b/crates/resolver-tests/Cargo.toml
new file mode 100644
index 0000000..cc50ad3
--- /dev/null
+++ b/crates/resolver-tests/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "resolver-tests"
+version = "0.1.0"
+edition = "2018"
+
+[dependencies]
+cargo = { path = "../.." }
+cargo-util = { path = "../cargo-util" }
+is-terminal = "0.4.0"
+lazy_static = "1.3.0"
+proptest = "0.9.1"
+varisat = "0.2.1"
diff --git a/crates/resolver-tests/src/lib.rs b/crates/resolver-tests/src/lib.rs
new file mode 100644
index 0000000..3ffb6c5
--- /dev/null
+++ b/crates/resolver-tests/src/lib.rs
@@ -0,0 +1,991 @@
+#![allow(clippy::all)]
+
+use std::cell::RefCell;
+use std::cmp::PartialEq;
+use std::cmp::{max, min};
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::fmt;
+use std::fmt::Write;
+use std::rc::Rc;
+use std::task::Poll;
+use std::time::Instant;
+
+use cargo::core::dependency::DepKind;
+use cargo::core::resolver::{self, ResolveOpts, VersionPreferences};
+use cargo::core::source::{GitReference, QueryKind, SourceId};
+use cargo::core::Resolve;
+use cargo::core::{Dependency, PackageId, Registry, Summary};
+use cargo::util::{CargoResult, Config, Graph, IntoUrl};
+
+use proptest::collection::{btree_map, vec};
+use proptest::prelude::*;
+use proptest::sample::Index;
+use proptest::string::string_regex;
+use varisat::{self, ExtendFormula};
+
+pub fn resolve(deps: Vec<Dependency>, registry: &[Summary]) -> CargoResult<Vec<PackageId>> {
+ resolve_with_config(deps, registry, &Config::default().unwrap())
+}
+
+pub fn resolve_and_validated(
+ deps: Vec<Dependency>,
+ registry: &[Summary],
+ sat_resolve: Option<SatResolve>,
+) -> CargoResult<Vec<PackageId>> {
+ let resolve = resolve_with_config_raw(deps.clone(), registry, &Config::default().unwrap());
+
+ match resolve {
+ Err(e) => {
+ let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry));
+ if sat_resolve.sat_resolve(&deps) {
+ panic!(
+ "the resolve err but the sat_resolve thinks this will work:\n{}",
+ sat_resolve.use_packages().unwrap()
+ );
+ }
+ Err(e)
+ }
+ Ok(resolve) => {
+ let mut stack = vec![pkg_id("root")];
+ let mut used = HashSet::new();
+ let mut links = HashSet::new();
+ while let Some(p) = stack.pop() {
+ assert!(resolve.contains(&p));
+ if used.insert(p) {
+ // in the tests all `links` crates end in `-sys`
+ if p.name().ends_with("-sys") {
+ assert!(links.insert(p.name()));
+ }
+ stack.extend(resolve.deps(p).map(|(dp, deps)| {
+ for d in deps {
+ assert!(d.matches_id(dp));
+ }
+ dp
+ }));
+ }
+ }
+ let out = resolve.sort();
+ assert_eq!(out.len(), used.len());
+
+ let mut pub_deps: HashMap<PackageId, HashSet<_>> = HashMap::new();
+ for &p in out.iter() {
+ // make the list of `p` public dependencies
+ let mut self_pub_dep = HashSet::new();
+ self_pub_dep.insert(p);
+ for (dp, deps) in resolve.deps(p) {
+ if deps.iter().any(|d| d.is_public()) {
+ self_pub_dep.extend(pub_deps[&dp].iter().cloned())
+ }
+ }
+ pub_deps.insert(p, self_pub_dep);
+
+ // check if `p` has a public dependencies conflicts
+ let seen_dep: BTreeSet<_> = resolve
+ .deps(p)
+ .flat_map(|(dp, _)| pub_deps[&dp].iter().cloned())
+ .collect();
+ let seen_dep: Vec<_> = seen_dep.iter().collect();
+ for a in seen_dep.windows(2) {
+ if a[0].name() == a[1].name() {
+ panic!(
+ "the package {:?} can publicly see {:?} and {:?}",
+ p, a[0], a[1]
+ )
+ }
+ }
+ }
+ let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry));
+ if !sat_resolve.sat_is_valid_solution(&out) {
+ panic!(
+ "the sat_resolve err but the resolve thinks this will work:\n{:?}",
+ resolve
+ );
+ }
+ Ok(out)
+ }
+ }
+}
+
+pub fn resolve_with_config(
+ deps: Vec<Dependency>,
+ registry: &[Summary],
+ config: &Config,
+) -> CargoResult<Vec<PackageId>> {
+ let resolve = resolve_with_config_raw(deps, registry, config)?;
+ Ok(resolve.sort())
+}
+
+pub fn resolve_with_config_raw(
+ deps: Vec<Dependency>,
+ registry: &[Summary],
+ config: &Config,
+) -> CargoResult<Resolve> {
+ struct MyRegistry<'a> {
+ list: &'a [Summary],
+ used: HashSet<PackageId>,
+ }
+ impl<'a> Registry for MyRegistry<'a> {
+ fn query(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>> {
+ for summary in self.list.iter() {
+ let matched = match kind {
+ QueryKind::Exact => dep.matches(summary),
+ QueryKind::Fuzzy => true,
+ };
+ if matched {
+ self.used.insert(summary.package_id());
+ f(summary.clone());
+ }
+ }
+ Poll::Ready(Ok(()))
+ }
+
+ fn describe_source(&self, _src: SourceId) -> String {
+ String::new()
+ }
+
+ fn is_replaced(&self, _src: SourceId) -> bool {
+ false
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ Ok(())
+ }
+ }
+ impl<'a> Drop for MyRegistry<'a> {
+ fn drop(&mut self) {
+ if std::thread::panicking() && self.list.len() != self.used.len() {
+                // we found a case that causes a panic and did not use all of the input.
+                // let's print the part of the input that was used, to aid minimization.
+ println!(
+ "{:?}",
+ PrettyPrintRegistry(
+ self.list
+ .iter()
+ .filter(|s| { self.used.contains(&s.package_id()) })
+ .cloned()
+ .collect()
+ )
+ );
+ }
+ }
+ }
+ let mut registry = MyRegistry {
+ list: registry,
+ used: HashSet::new(),
+ };
+ let summary = Summary::new(
+ config,
+ pkg_id("root"),
+ deps,
+ &BTreeMap::new(),
+ None::<&String>,
+ )
+ .unwrap();
+ let opts = ResolveOpts::everything();
+ let start = Instant::now();
+ let resolve = resolver::resolve(
+ &[(summary, opts)],
+ &[],
+ &mut registry,
+ &VersionPreferences::default(),
+ Some(config),
+ true,
+ );
+
+    // The largest test in our suite takes less than 30 sec.
+    // So let's fail the test if we have been running for too long.
+ assert!(start.elapsed().as_secs() < 60);
+ resolve
+}
+
+const fn num_bits<T>() -> usize {
+ std::mem::size_of::<T>() * 8
+}
+
+fn log_bits(x: usize) -> usize {
+ if x == 0 {
+ return 0;
+ }
+ assert!(x > 0);
+ (num_bits::<usize>() as u32 - x.leading_zeros()) as usize
+}
+
+fn sat_at_most_one(solver: &mut impl varisat::ExtendFormula, vars: &[varisat::Var]) {
+ if vars.len() <= 1 {
+ return;
+ } else if vars.len() == 2 {
+ solver.add_clause(&[vars[0].negative(), vars[1].negative()]);
+ return;
+ } else if vars.len() == 3 {
+ solver.add_clause(&[vars[0].negative(), vars[1].negative()]);
+ solver.add_clause(&[vars[0].negative(), vars[2].negative()]);
+ solver.add_clause(&[vars[1].negative(), vars[2].negative()]);
+ return;
+ }
+ // use the "Binary Encoding" from
+ // https://www.it.uu.se/research/group/astra/ModRef10/papers/Alan%20M.%20Frisch%20and%20Paul%20A.%20Giannoros.%20SAT%20Encodings%20of%20the%20At-Most-k%20Constraint%20-%20ModRef%202010.pdf
+ let bits: Vec<varisat::Var> = solver.new_var_iter(log_bits(vars.len())).collect();
+ for (i, p) in vars.iter().enumerate() {
+ for b in 0..bits.len() {
+ solver.add_clause(&[p.negative(), bits[b].lit(((1 << b) & i) > 0)]);
+ }
+ }
+}
+
+fn sat_at_most_one_by_key<K: std::hash::Hash + Eq>(
+ cnf: &mut impl varisat::ExtendFormula,
+ data: impl Iterator<Item = (K, varisat::Var)>,
+) -> HashMap<K, Vec<varisat::Var>> {
+ // no two packages with the same links set
+ let mut by_keys: HashMap<K, Vec<varisat::Var>> = HashMap::new();
+ for (p, v) in data {
+ by_keys.entry(p).or_default().push(v)
+ }
+ for key in by_keys.values() {
+ sat_at_most_one(cnf, key);
+ }
+ by_keys
+}
+
+/// Resolution can be reduced to the SAT problem. So this is an alternative implementation
+/// of the resolver that uses a SAT library for the hard work. This is intended to be easy to read,
+/// as compared to the real resolver.
+///
+/// For the subset of functionality that is currently exercised by `registry_strategy`, this will
+/// find a valid resolution if one exists. The big thing that the real resolver does,
+/// that this one does not do is work with features and optional dependencies.
+///
+/// The SAT library does not optimize for the newer version,
+/// so the selected packages may not match the real resolver.
+#[derive(Clone)]
+pub struct SatResolve(Rc<RefCell<SatResolveInner>>);
+struct SatResolveInner {
+ solver: varisat::Solver<'static>,
+ var_for_is_packages_used: HashMap<PackageId, varisat::Var>,
+ by_name: HashMap<&'static str, Vec<PackageId>>,
+}
+
+impl SatResolve {
+ pub fn new(registry: &[Summary]) -> Self {
+ let mut cnf = varisat::CnfFormula::new();
+ let var_for_is_packages_used: HashMap<PackageId, varisat::Var> = registry
+ .iter()
+ .map(|s| (s.package_id(), cnf.new_var()))
+ .collect();
+
+ // no two packages with the same links set
+ sat_at_most_one_by_key(
+ &mut cnf,
+ registry
+ .iter()
+ .map(|s| (s.links(), var_for_is_packages_used[&s.package_id()]))
+ .filter(|(l, _)| l.is_some()),
+ );
+
+ // no two semver compatible versions of the same package
+ let by_activations_keys = sat_at_most_one_by_key(
+ &mut cnf,
+ var_for_is_packages_used
+ .iter()
+ .map(|(p, &v)| (p.as_activations_key(), v)),
+ );
+
+ let mut by_name: HashMap<&'static str, Vec<PackageId>> = HashMap::new();
+
+ for p in registry.iter() {
+ by_name
+ .entry(p.name().as_str())
+ .or_default()
+ .push(p.package_id())
+ }
+
+ let empty_vec = vec![];
+
+ let mut graph: Graph<PackageId, ()> = Graph::new();
+
+ let mut version_selected_for: HashMap<
+ PackageId,
+ HashMap<Dependency, HashMap<_, varisat::Var>>,
+ > = HashMap::new();
+        // active packages need each of their `deps` to be satisfied
+ for p in registry.iter() {
+ graph.add(p.package_id());
+ for dep in p.dependencies() {
+ // This can more easily be written as:
+ // !is_active(p) or one of the things that match dep is_active
+ // All the complexity, from here to the end, is to support public and private dependencies!
+ let mut by_key: HashMap<_, Vec<varisat::Lit>> = HashMap::new();
+ for &m in by_name
+ .get(dep.package_name().as_str())
+ .unwrap_or(&empty_vec)
+ .iter()
+ .filter(|&p| dep.matches_id(*p))
+ {
+ graph.link(p.package_id(), m);
+ by_key
+ .entry(m.as_activations_key())
+ .or_default()
+ .push(var_for_is_packages_used[&m].positive());
+ }
+ let keys: HashMap<_, _> = by_key.keys().map(|&k| (k, cnf.new_var())).collect();
+
+ // if `p` is active then we need to select one of the keys
+ let matches: Vec<_> = keys
+ .values()
+ .map(|v| v.positive())
+ .chain(Some(var_for_is_packages_used[&p.package_id()].negative()))
+ .collect();
+ cnf.add_clause(&matches);
+
+ // if a key is active then we need to select one of the versions
+ for (key, vars) in by_key.iter() {
+ let mut matches = vars.clone();
+ matches.push(keys[key].negative());
+ cnf.add_clause(&matches);
+ }
+
+ version_selected_for
+ .entry(p.package_id())
+ .or_default()
+ .insert(dep.clone(), keys);
+ }
+ }
+
+ let topological_order = graph.sort();
+
+ // we already ensure there is only one version for each `activations_key` so we can think of
+ // `publicly_exports` as being in terms of a set of `activations_key`s
+ let mut publicly_exports: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new();
+
+ for &key in by_activations_keys.keys() {
+ // everything publicly depends on itself
+ let var = publicly_exports
+ .entry(key)
+ .or_default()
+ .entry(key)
+ .or_insert_with(|| cnf.new_var());
+ cnf.add_clause(&[var.positive()]);
+ }
+
+ // if a `dep` is public then `p` `publicly_exports` all the things that the selected version `publicly_exports`
+ for &p in topological_order.iter() {
+ if let Some(deps) = version_selected_for.get(&p) {
+ let mut p_exports = publicly_exports.remove(&p.as_activations_key()).unwrap();
+ for (_, versions) in deps.iter().filter(|(d, _)| d.is_public()) {
+ for (ver, sel) in versions {
+ for (&export_pid, &export_var) in publicly_exports[ver].iter() {
+ let our_var =
+ p_exports.entry(export_pid).or_insert_with(|| cnf.new_var());
+ cnf.add_clause(&[
+ sel.negative(),
+ export_var.negative(),
+ our_var.positive(),
+ ]);
+ }
+ }
+ }
+ publicly_exports.insert(p.as_activations_key(), p_exports);
+ }
+ }
+
+ // we already ensure there is only one version for each `activations_key` so we can think of
+ // `can_see` as being in terms of a set of `activations_key`s
+ // and if `p` `publicly_exports` `export` then it `can_see` `export`
+ let mut can_see: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new();
+
+ // if `p` has a `dep` that selected `ver` then it `can_see` all the things that the selected version `publicly_exports`
+ for (&p, deps) in version_selected_for.iter() {
+ let p_can_see = can_see.entry(p).or_default();
+ for (_, versions) in deps.iter() {
+ for (&ver, sel) in versions {
+ for (&export_pid, &export_var) in publicly_exports[&ver].iter() {
+ let our_var = p_can_see.entry(export_pid).or_insert_with(|| cnf.new_var());
+ cnf.add_clause(&[
+ sel.negative(),
+ export_var.negative(),
+ our_var.positive(),
+ ]);
+ }
+ }
+ }
+ }
+
+ // a package `can_see` only one version by each name
+ for (_, see) in can_see.iter() {
+ sat_at_most_one_by_key(&mut cnf, see.iter().map(|((name, _, _), &v)| (name, v)));
+ }
+ let mut solver = varisat::Solver::new();
+ solver.add_formula(&cnf);
+
+        // We don't need to `solve` now. We know that "use nothing" will satisfy all the clauses so far.
+ // But things run faster if we let it spend some time figuring out how the constraints interact before we add assumptions.
+ solver
+ .solve()
+ .expect("docs say it can't error in default config");
+ SatResolve(Rc::new(RefCell::new(SatResolveInner {
+ solver,
+ var_for_is_packages_used,
+ by_name,
+ })))
+ }
+ pub fn sat_resolve(&self, deps: &[Dependency]) -> bool {
+ let mut s = self.0.borrow_mut();
+ let mut assumption = vec![];
+ let mut this_call = None;
+
+ // the starting `deps` need to be satisfied
+ for dep in deps.iter() {
+ let empty_vec = vec![];
+ let matches: Vec<varisat::Lit> = s
+ .by_name
+ .get(dep.package_name().as_str())
+ .unwrap_or(&empty_vec)
+ .iter()
+ .filter(|&p| dep.matches_id(*p))
+ .map(|p| s.var_for_is_packages_used[p].positive())
+ .collect();
+ if matches.is_empty() {
+ return false;
+ } else if matches.len() == 1 {
+ assumption.extend_from_slice(&matches)
+ } else {
+ if this_call.is_none() {
+ let new_var = s.solver.new_var();
+ this_call = Some(new_var);
+ assumption.push(new_var.positive());
+ }
+ let mut matches = matches;
+ matches.push(this_call.unwrap().negative());
+ s.solver.add_clause(&matches);
+ }
+ }
+
+ s.solver.assume(&assumption);
+
+ s.solver
+ .solve()
+ .expect("docs say it can't error in default config")
+ }
+ pub fn sat_is_valid_solution(&self, pids: &[PackageId]) -> bool {
+ let mut s = self.0.borrow_mut();
+ for p in pids {
+ if p.name().as_str() != "root" && !s.var_for_is_packages_used.contains_key(p) {
+ return false;
+ }
+ }
+ let assumption: Vec<_> = s
+ .var_for_is_packages_used
+ .iter()
+ .map(|(p, v)| v.lit(pids.contains(p)))
+ .collect();
+
+ s.solver.assume(&assumption);
+
+ s.solver
+ .solve()
+ .expect("docs say it can't error in default config")
+ }
+ fn use_packages(&self) -> Option<String> {
+ self.0.borrow().solver.model().map(|lits| {
+ let lits: HashSet<_> = lits
+ .iter()
+ .filter(|l| l.is_positive())
+ .map(|l| l.var())
+ .collect();
+ let mut out = String::new();
+ out.push_str("used:\n");
+ for (p, v) in self.0.borrow().var_for_is_packages_used.iter() {
+ if lits.contains(v) {
+ writeln!(&mut out, " {}", p).unwrap();
+ }
+ }
+ out
+ })
+ }
+}
+
+pub trait ToDep {
+ fn to_dep(self) -> Dependency;
+}
+
+impl ToDep for &'static str {
+ fn to_dep(self) -> Dependency {
+ Dependency::parse(self, Some("1.0.0"), registry_loc()).unwrap()
+ }
+}
+
+impl ToDep for Dependency {
+ fn to_dep(self) -> Dependency {
+ self
+ }
+}
+
+pub trait ToPkgId {
+ fn to_pkgid(&self) -> PackageId;
+}
+
+impl ToPkgId for PackageId {
+ fn to_pkgid(&self) -> PackageId {
+ *self
+ }
+}
+
+impl<'a> ToPkgId for &'a str {
+ fn to_pkgid(&self) -> PackageId {
+ PackageId::new(*self, "1.0.0", registry_loc()).unwrap()
+ }
+}
+
+impl<T: AsRef<str>, U: AsRef<str>> ToPkgId for (T, U) {
+ fn to_pkgid(&self) -> PackageId {
+ let (name, vers) = self;
+ PackageId::new(name.as_ref(), vers.as_ref(), registry_loc()).unwrap()
+ }
+}
+
+#[macro_export]
+macro_rules! pkg {
+ ($pkgid:expr => [$($deps:expr),+ $(,)* ]) => ({
+ let d: Vec<Dependency> = vec![$($deps.to_dep()),+];
+ $crate::pkg_dep($pkgid, d)
+ });
+
+ ($pkgid:expr) => ({
+ $crate::pkg($pkgid)
+ })
+}
+
+fn registry_loc() -> SourceId {
+ lazy_static::lazy_static! {
+ static ref EXAMPLE_DOT_COM: SourceId =
+ SourceId::for_registry(&"https://example.com".into_url().unwrap()).unwrap();
+ }
+ *EXAMPLE_DOT_COM
+}
+
+pub fn pkg<T: ToPkgId>(name: T) -> Summary {
+ pkg_dep(name, Vec::new())
+}
+
+pub fn pkg_dep<T: ToPkgId>(name: T, dep: Vec<Dependency>) -> Summary {
+ let pkgid = name.to_pkgid();
+ let link = if pkgid.name().ends_with("-sys") {
+ Some(pkgid.name().as_str())
+ } else {
+ None
+ };
+ Summary::new(
+ &Config::default().unwrap(),
+ name.to_pkgid(),
+ dep,
+ &BTreeMap::new(),
+ link,
+ )
+ .unwrap()
+}
+
+pub fn pkg_id(name: &str) -> PackageId {
+ PackageId::new(name, "1.0.0", registry_loc()).unwrap()
+}
+
+fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
+ let remote = loc.into_url();
+ let master = GitReference::Branch("master".to_string());
+ let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap();
+
+ PackageId::new(name, "1.0.0", source_id).unwrap()
+}
+
+pub fn pkg_loc(name: &str, loc: &str) -> Summary {
+ let link = if name.ends_with("-sys") {
+ Some(name)
+ } else {
+ None
+ };
+ Summary::new(
+ &Config::default().unwrap(),
+ pkg_id_loc(name, loc),
+ Vec::new(),
+ &BTreeMap::new(),
+ link,
+ )
+ .unwrap()
+}
+
+pub fn remove_dep(sum: &Summary, ind: usize) -> Summary {
+ let mut deps = sum.dependencies().to_vec();
+ deps.remove(ind);
+ // note: more things will need to be copied over in the future, but it works for now.
+ Summary::new(
+ &Config::default().unwrap(),
+ sum.package_id(),
+ deps,
+ &BTreeMap::new(),
+ sum.links().map(|a| a.as_str()),
+ )
+ .unwrap()
+}
+
+pub fn dep(name: &str) -> Dependency {
+ dep_req(name, "*")
+}
+pub fn dep_req(name: &str, req: &str) -> Dependency {
+ Dependency::parse(name, Some(req), registry_loc()).unwrap()
+}
+pub fn dep_req_kind(name: &str, req: &str, kind: DepKind, public: bool) -> Dependency {
+ let mut dep = dep_req(name, req);
+ dep.set_kind(kind);
+ dep.set_public(public);
+ dep
+}
+
+pub fn dep_loc(name: &str, location: &str) -> Dependency {
+ let url = location.into_url().unwrap();
+ let master = GitReference::Branch("master".to_string());
+ let source_id = SourceId::for_git(&url, master).unwrap();
+ Dependency::parse(name, Some("1.0.0"), source_id).unwrap()
+}
+pub fn dep_kind(name: &str, kind: DepKind) -> Dependency {
+ dep(name).set_kind(kind).clone()
+}
+
+pub fn registry(pkgs: Vec<Summary>) -> Vec<Summary> {
+ pkgs
+}
+
+pub fn names<P: ToPkgId>(names: &[P]) -> Vec<PackageId> {
+ names.iter().map(|name| name.to_pkgid()).collect()
+}
+
+pub fn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {
+ names
+ .iter()
+ .map(|&(name, loc)| pkg_id_loc(name, loc))
+ .collect()
+}
+
+/// By default `Summary` and `Dependency` have a very verbose `Debug` representation.
+/// This replaces it with a representation that uses the constructors from this file.
+///
+/// If `registry_strategy` is improved to modify more fields
+/// then this needs to update to display the corresponding constructor.
+pub struct PrettyPrintRegistry(pub Vec<Summary>);
+
+impl fmt::Debug for PrettyPrintRegistry {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "vec![")?;
+ for s in &self.0 {
+ if s.dependencies().is_empty() {
+ write!(f, "pkg!((\"{}\", \"{}\")),", s.name(), s.version())?;
+ } else {
+ write!(f, "pkg!((\"{}\", \"{}\") => [", s.name(), s.version())?;
+ for d in s.dependencies() {
+ if d.kind() == DepKind::Normal
+ && &d.version_req().to_string() == "*"
+ && !d.is_public()
+ {
+ write!(f, "dep(\"{}\"),", d.name_in_toml())?;
+ } else if d.kind() == DepKind::Normal && !d.is_public() {
+ write!(
+ f,
+ "dep_req(\"{}\", \"{}\"),",
+ d.name_in_toml(),
+ d.version_req()
+ )?;
+ } else {
+ write!(
+ f,
+ "dep_req_kind(\"{}\", \"{}\", {}, {}),",
+ d.name_in_toml(),
+ d.version_req(),
+ match d.kind() {
+ DepKind::Development => "DepKind::Development",
+ DepKind::Build => "DepKind::Build",
+ DepKind::Normal => "DepKind::Normal",
+ },
+ d.is_public()
+ )?;
+ }
+ }
+ write!(f, "]),")?;
+ }
+ }
+ write!(f, "]")
+ }
+}
+
+#[test]
+fn meta_test_deep_pretty_print_registry() {
+ assert_eq!(
+ &format!(
+ "{:?}",
+ PrettyPrintRegistry(vec![
+ pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
+ pkg!(("foo", "2.0.0") => [dep_req("bar", "*")]),
+ pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
+ dep_req("other", "1")]),
+ pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
+ pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
+ pkg!(("baz", "1.0.1")),
+ pkg!(("cat", "1.0.2") => [dep_req_kind("other", "2", DepKind::Build, false)]),
+ pkg!(("cat", "1.0.3") => [dep_req_kind("other", "2", DepKind::Development, false)]),
+ pkg!(("dep_req", "1.0.0")),
+ pkg!(("dep_req", "2.0.0")),
+ ])
+ ),
+ "vec![pkg!((\"foo\", \"1.0.1\") => [dep_req(\"bar\", \"^1\"),]),\
+ pkg!((\"foo\", \"1.0.0\") => [dep_req(\"bar\", \"^2\"),]),\
+ pkg!((\"foo\", \"2.0.0\") => [dep(\"bar\"),]),\
+ pkg!((\"bar\", \"1.0.0\") => [dep_req(\"baz\", \"=1.0.2\"),dep_req(\"other\", \"^1\"),]),\
+ pkg!((\"bar\", \"2.0.0\") => [dep_req(\"baz\", \"=1.0.1\"),]),\
+ pkg!((\"baz\", \"1.0.2\") => [dep_req(\"other\", \"^2\"),]),\
+ pkg!((\"baz\", \"1.0.1\")),\
+ pkg!((\"cat\", \"1.0.2\") => [dep_req_kind(\"other\", \"^2\", DepKind::Build, false),]),\
+ pkg!((\"cat\", \"1.0.3\") => [dep_req_kind(\"other\", \"^2\", DepKind::Development, false),]),\
+ pkg!((\"dep_req\", \"1.0.0\")),\
+ pkg!((\"dep_req\", \"2.0.0\")),]"
+ )
+}
+
+/// This generates a random registry index.
+/// Unlike a naive vec((Name, Ver, vec((Name, VerRq), ..), ..),
+/// this strategy has a high probability of generating valid dependencies.
+pub fn registry_strategy(
+ max_crates: usize,
+ max_versions: usize,
+ shrinkage: usize,
+) -> impl Strategy<Value = PrettyPrintRegistry> {
+ let name = string_regex("[A-Za-z][A-Za-z0-9_-]*(-sys)?").unwrap();
+
+ let raw_version = ..max_versions.pow(3);
+ let version_from_raw = move |r: usize| {
+ let major = ((r / max_versions) / max_versions) % max_versions;
+ let minor = (r / max_versions) % max_versions;
+ let patch = r % max_versions;
+ format!("{}.{}.{}", major, minor, patch)
+ };
+
+ // If this is false then the crate will depend on the nonexistent "bad"
+ // instead of the complex set we generated for it.
+ let allow_deps = prop::bool::weighted(0.99);
+
+ let list_of_versions =
+ btree_map(raw_version, allow_deps, 1..=max_versions).prop_map(move |ver| {
+ ver.into_iter()
+ .map(|a| (version_from_raw(a.0), a.1))
+ .collect::<Vec<_>>()
+ });
+
+ let list_of_crates_with_versions =
+ btree_map(name, list_of_versions, 1..=max_crates).prop_map(|mut vers| {
+ // root is the name of the thing being compiled
+ // so it would be confusing to have it in the index
+ vers.remove("root");
+ // bad is a name reserved for a dep that won't work
+ vers.remove("bad");
+ vers
+ });
+
+    // each version of each crate can depend on each crate smaller than it.
+ // In theory shrinkage should be 2, but in practice we get better trees with a larger value.
+ let max_deps = max_versions * (max_crates * (max_crates - 1)) / shrinkage;
+
+ let raw_version_range = (any::<Index>(), any::<Index>());
+ let raw_dependency = (
+ any::<Index>(),
+ any::<Index>(),
+ raw_version_range,
+ 0..=1,
+ Just(false),
+ // TODO: ^ this needs to be set back to `any::<bool>()` and work before public & private dependencies can stabilize
+ );
+
+ fn order_index(a: Index, b: Index, size: usize) -> (usize, usize) {
+ let (a, b) = (a.index(size), b.index(size));
+ (min(a, b), max(a, b))
+ }
+
+ let list_of_raw_dependency = vec(raw_dependency, ..=max_deps);
+
+ // By default a package depends only on other packages that have a smaller name,
+ // this helps make sure that all things in the resulting index are DAGs.
+    // If this is true then the DAG is maintained with greater instead.
+ let reverse_alphabetical = any::<bool>().no_shrink();
+
+ (
+ list_of_crates_with_versions,
+ list_of_raw_dependency,
+ reverse_alphabetical,
+ )
+ .prop_map(
+ |(crate_vers_by_name, raw_dependencies, reverse_alphabetical)| {
+ let list_of_pkgid: Vec<_> = crate_vers_by_name
+ .iter()
+ .flat_map(|(name, vers)| vers.iter().map(move |x| ((name.as_str(), &x.0), x.1)))
+ .collect();
+ let len_all_pkgid = list_of_pkgid.len();
+ let mut dependency_by_pkgid = vec![vec![]; len_all_pkgid];
+ for (a, b, (c, d), k, p) in raw_dependencies {
+ let (a, b) = order_index(a, b, len_all_pkgid);
+ let (a, b) = if reverse_alphabetical { (b, a) } else { (a, b) };
+ let ((dep_name, _), _) = list_of_pkgid[a];
+ if (list_of_pkgid[b].0).0 == dep_name {
+ continue;
+ }
+ let s = &crate_vers_by_name[dep_name];
+ let s_last_index = s.len() - 1;
+ let (c, d) = order_index(c, d, s.len());
+
+ dependency_by_pkgid[b].push(dep_req_kind(
+ dep_name,
+ &if c == 0 && d == s_last_index {
+ "*".to_string()
+ } else if c == 0 {
+ format!("<={}", s[d].0)
+ } else if d == s_last_index {
+ format!(">={}", s[c].0)
+ } else if c == d {
+ format!("={}", s[c].0)
+ } else {
+ format!(">={}, <={}", s[c].0, s[d].0)
+ },
+ match k {
+ 0 => DepKind::Normal,
+ 1 => DepKind::Build,
+ // => DepKind::Development, // Development has no impact so don't gen
+ _ => panic!("bad index for DepKind"),
+ },
+ p && k == 0,
+ ))
+ }
+
+ let mut out: Vec<Summary> = list_of_pkgid
+ .into_iter()
+ .zip(dependency_by_pkgid.into_iter())
+ .map(|(((name, ver), allow_deps), deps)| {
+ pkg_dep(
+ (name, ver).to_pkgid(),
+ if !allow_deps {
+ vec![dep_req("bad", "*")]
+ } else {
+ let mut deps = deps;
+ deps.sort_by_key(|d| d.name_in_toml());
+ deps.dedup_by_key(|d| d.name_in_toml());
+ deps
+ },
+ )
+ })
+ .collect();
+
+ if reverse_alphabetical {
+ // make sure the complicated cases are at the end
+ out.reverse();
+ }
+
+ PrettyPrintRegistry(out)
+ },
+ )
+}
+
+/// This test is to test the generator to ensure
+/// that it makes registries with large dependency trees
+#[test]
+fn meta_test_deep_trees_from_strategy() {
+ use proptest::strategy::ValueTree;
+ use proptest::test_runner::TestRunner;
+
+ let mut dis = [0; 21];
+
+ let strategy = registry_strategy(50, 20, 60);
+ let mut test_runner = TestRunner::deterministic();
+ for _ in 0..128 {
+ let PrettyPrintRegistry(input) = strategy
+ .new_tree(&mut TestRunner::new_with_rng(
+ Default::default(),
+ test_runner.new_rng(),
+ ))
+ .unwrap()
+ .current();
+ let reg = registry(input.clone());
+ for this in input.iter().rev().take(10) {
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ );
+ dis[res
+ .as_ref()
+ .map(|x| min(x.len(), dis.len()) - 1)
+ .unwrap_or(0)] += 1;
+ if dis.iter().all(|&x| x > 0) {
+ return;
+ }
+ }
+ }
+
+ panic!(
+ "In 1280 tries we did not see a wide enough distribution of dependency trees! dis: {:?}",
+ dis
+ );
+}
+
+/// This test is to test the generator to ensure
+/// that it makes registries that include multiple versions of the same library
+#[test]
+fn meta_test_multiple_versions_strategy() {
+ use proptest::strategy::ValueTree;
+ use proptest::test_runner::TestRunner;
+
+ let mut dis = [0; 10];
+
+ let strategy = registry_strategy(50, 20, 60);
+ let mut test_runner = TestRunner::deterministic();
+ for _ in 0..128 {
+ let PrettyPrintRegistry(input) = strategy
+ .new_tree(&mut TestRunner::new_with_rng(
+ Default::default(),
+ test_runner.new_rng(),
+ ))
+ .unwrap()
+ .current();
+ let reg = registry(input.clone());
+ for this in input.iter().rev().take(10) {
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ );
+ if let Ok(mut res) = res {
+ let res_len = res.len();
+ res.sort_by_key(|s| s.name());
+ res.dedup_by_key(|s| s.name());
+ dis[min(res_len - res.len(), dis.len() - 1)] += 1;
+ }
+ if dis.iter().all(|&x| x > 0) {
+ return;
+ }
+ }
+ }
+ panic!(
+ "In 1280 tries we did not see a wide enough distribution of multiple versions of the same library! dis: {:?}",
+ dis
+ );
+}
+
+/// Assert `xs` contains `elems`
+#[track_caller]
+pub fn assert_contains<A: PartialEq>(xs: &[A], elems: &[A]) {
+ for elem in elems {
+ assert!(xs.contains(elem));
+ }
+}
+
+#[track_caller]
+pub fn assert_same<A: PartialEq>(a: &[A], b: &[A]) {
+ assert_eq!(a.len(), b.len());
+ assert_contains(b, a);
+}
diff --git a/crates/resolver-tests/tests/resolve.rs b/crates/resolver-tests/tests/resolve.rs
new file mode 100644
index 0000000..9d8f25a
--- /dev/null
+++ b/crates/resolver-tests/tests/resolve.rs
@@ -0,0 +1,1504 @@
+use cargo::core::dependency::DepKind;
+use cargo::core::Dependency;
+use cargo::util::Config;
+use cargo_util::is_ci;
+
+use resolver_tests::{
+ assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, dep_req_kind, loc_names, names,
+ pkg, pkg_id, pkg_loc, registry, registry_strategy, remove_dep, resolve, resolve_and_validated,
+ resolve_with_config, PrettyPrintRegistry, SatResolve, ToDep, ToPkgId,
+};
+
+use proptest::prelude::*;
+
+// NOTE: proptest is a form of fuzz testing. It generates random input and makes sure that
+// certain universal truths are upheld. Therefore, it can pass when there is a problem,
+// but if it fails then there really is something wrong. When testing something as
+// complicated as the resolver, the problems can be very subtle and hard to generate.
+// We have had a history of these tests only failing on PRs long after a bug is introduced.
+// If you have one of these test fail please report it on #6258,
+// and if you did not change the resolver then feel free to retry without concern.
+proptest! {
+ #![proptest_config(ProptestConfig {
+ max_shrink_iters:
+ if is_ci() || !is_terminal::IsTerminal::is_terminal(&std::io::stderr()){
+ // This attempts to make sure that CI will fail fast,
+ 0
+ } else {
+ // but that local builds will give a small clear test case.
+ u32::MAX
+ },
+ result_cache: prop::test_runner::basic_result_cache,
+ .. ProptestConfig::default()
+ })]
+
+ /// NOTE: if you think this test has failed spuriously see the note at the top of this macro.
+ #[test]
+ fn prop_passes_validation(
+ PrettyPrintRegistry(input) in registry_strategy(50, 20, 60)
+ ) {
+ let reg = registry(input.clone());
+ let sat_resolve = SatResolve::new(&reg);
+ // there is only a small chance that any one
+ // crate will be interesting.
+ // So we try some of the most complicated.
+ for this in input.iter().rev().take(20) {
+ let _ = resolve_and_validated(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ Some(sat_resolve.clone()),
+ );
+ }
+ }
+
+ /// NOTE: if you think this test has failed spuriously see the note at the top of this macro.
+ #[test]
+ fn prop_minimum_version_errors_the_same(
+ PrettyPrintRegistry(input) in registry_strategy(50, 20, 60)
+ ) {
+ let mut config = Config::default().unwrap();
+ config.nightly_features_allowed = true;
+ config
+ .configure(
+ 1,
+ false,
+ None,
+ false,
+ false,
+ false,
+ &None,
+ &["minimal-versions".to_string()],
+ &[],
+ )
+ .unwrap();
+
+ let reg = registry(input.clone());
+ // there is only a small chance that any one
+ // crate will be interesting.
+ // So we try some of the most complicated.
+ for this in input.iter().rev().take(10) {
+ // minimal-versions change what order the candidates
+ // are tried but not the existence of a solution
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ );
+
+ let mres = resolve_with_config(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ &config,
+ );
+
+ prop_assert_eq!(
+ res.is_ok(),
+ mres.is_ok(),
+ "minimal-versions and regular resolver disagree about whether `{} = \"={}\"` can resolve",
+ this.name(),
+ this.version()
+ )
+ }
+ }
+
+ /// NOTE: if you think this test has failed spuriously see the note at the top of this macro.
+ #[test]
+ fn prop_removing_a_dep_cant_break(
+ PrettyPrintRegistry(input) in registry_strategy(50, 20, 60),
+ indexes_to_remove in prop::collection::vec((any::<prop::sample::Index>(), any::<prop::sample::Index>()), ..10)
+ ) {
+ let reg = registry(input.clone());
+ let mut removed_input = input.clone();
+ for (summary_idx, dep_idx) in indexes_to_remove {
+ if !removed_input.is_empty() {
+ let summary_idx = summary_idx.index(removed_input.len());
+ let deps = removed_input[summary_idx].dependencies();
+ if !deps.is_empty() {
+ let new = remove_dep(&removed_input[summary_idx], dep_idx.index(deps.len()));
+ removed_input[summary_idx] = new;
+ }
+ }
+ }
+ let removed_reg = registry(removed_input);
+ // there is only a small chance that any one
+ // crate will be interesting.
+ // So we try some of the most complicated.
+ for this in input.iter().rev().take(10) {
+ if resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ ).is_ok() {
+ prop_assert!(
+ resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &removed_reg,
+ ).is_ok(),
+ "full index worked for `{} = \"={}\"` but removing some deps broke it!",
+ this.name(),
+ this.version(),
+ )
+ }
+ }
+ }
+
+ /// NOTE: if you think this test has failed spuriously see the note at the top of this macro.
+ #[test]
+ fn prop_limited_independence_of_irrelevant_alternatives(
+ PrettyPrintRegistry(input) in registry_strategy(50, 20, 60),
+ indexes_to_unpublish in prop::collection::vec(any::<prop::sample::Index>(), ..10)
+ ) {
+ let reg = registry(input.clone());
+ // there is only a small chance that any one
+ // crate will be interesting.
+ // So we try some of the most complicated.
+ for this in input.iter().rev().take(10) {
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ );
+
+ match res {
+ Ok(r) => {
+ // If resolution was successful, then unpublishing a version of a crate
+ // that was not selected should not change that.
+ let not_selected: Vec<_> = input
+ .iter()
+ .cloned()
+ .filter(|x| !r.contains(&x.package_id()))
+ .collect();
+ if !not_selected.is_empty() {
+ let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(&not_selected)).collect();
+
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| !indexes_to_unpublish.contains(&x))
+ .collect(),
+ );
+
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &new_reg,
+ );
+
+ // Note: that we can not assert that the two `res` are identical
+ // as the resolver does depend on irrelevant alternatives.
+ // It uses how constrained a dependency requirement is
+ // to determine what order to evaluate requirements.
+
+ prop_assert!(
+ res.is_ok(),
+ "unpublishing {:?} stopped `{} = \"={}\"` from working",
+ indexes_to_unpublish.iter().map(|x| x.package_id()).collect::<Vec<_>>(),
+ this.name(),
+ this.version()
+ )
+ }
+ }
+
+ Err(_) => {
+ // If resolution was unsuccessful, then it should stay unsuccessful
+ // even if any version of a crate is unpublished.
+ let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(&input)).collect();
+
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| !indexes_to_unpublish.contains(&x))
+ .collect(),
+ );
+
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &new_reg,
+ );
+
+ prop_assert!(
+ res.is_err(),
+ "full index did not work for `{} = \"={}\"` but unpublishing {:?} fixed it!",
+ this.name(),
+ this.version(),
+ indexes_to_unpublish.iter().map(|x| x.package_id()).collect::<Vec<_>>(),
+ )
+ }
+ }
+ }
+ }
+}
+
+#[test]
+#[should_panic(expected = "pub dep")] // The error handling is not yet implemented.
+fn pub_fail() {
+ let input = vec![
+ pkg!(("a", "0.0.4")),
+ pkg!(("a", "0.0.5")),
+ pkg!(("e", "0.0.6") => [dep_req_kind("a", "<= 0.0.4", DepKind::Normal, true),]),
+ pkg!(("kB", "0.0.3") => [dep_req("a", ">= 0.0.5"),dep("e"),]),
+ ];
+ let reg = registry(input);
+ assert!(resolve_and_validated(vec![dep("kB")], &reg, None).is_err());
+}
+
+#[test]
+fn basic_public_dependency() {
+ let reg = registry(vec![
+ pkg!(("A", "0.1.0")),
+ pkg!(("A", "0.2.0")),
+ pkg!("B" => [dep_req_kind("A", "0.1", DepKind::Normal, true)]),
+ pkg!("C" => [dep("A"), dep("B")]),
+ ]);
+
+ let res = resolve_and_validated(vec![dep("C")], &reg, None).unwrap();
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("C", "1.0.0"),
+ ("B", "1.0.0"),
+ ("A", "0.1.0"),
+ ]),
+ );
+}
+
+#[test]
+fn public_dependency_filling_in() {
+ // The resolver has an optimization where if a candidate to resolve a dependency
+ // has already been activated then we skip looking at the candidate's dependencies.
+ // However, we have to be careful as the new path may make pub dependencies invalid.
+
+ // Triggering this case requires dependencies to be resolved in a specific order.
+ // Fuzzing found this unintuitive case, that triggers this unfortunate order of operations:
+ // 1. `d`'s dep on `c` is resolved
+ // 2. `d`'s dep on `a` is resolved with `0.1.1`
+ // 3. `c`'s dep on `b` is resolved with `0.0.2`
+ // 4. `b`'s dep on `a` is resolved with `0.0.6` no pub dev conflict as `b` is private to `c`
+ // 5. `d`'s dep on `b` is resolved with `0.0.2` triggering the optimization.
+ // Do we notice that `d` has a pub dep conflict on `a`? Lets try it and see.
+ let reg = registry(vec![
+ pkg!(("a", "0.0.6")),
+ pkg!(("a", "0.1.1")),
+ pkg!(("b", "0.0.0") => [dep("bad")]),
+ pkg!(("b", "0.0.1") => [dep("bad")]),
+ pkg!(("b", "0.0.2") => [dep_req_kind("a", "=0.0.6", DepKind::Normal, true)]),
+ pkg!("c" => [dep_req("b", ">=0.0.1")]),
+ pkg!("d" => [dep("c"), dep("a"), dep("b")]),
+ ]);
+
+ let res = resolve_and_validated(vec![dep("d")], &reg, None).unwrap();
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("d", "1.0.0"),
+ ("c", "1.0.0"),
+ ("b", "0.0.2"),
+ ("a", "0.0.6"),
+ ]),
+ );
+}
+
+#[test]
+fn public_dependency_filling_in_and_update() {
+ // The resolver has an optimization where if a candidate to resolve a dependency
+ // has already been activated then we skip looking at the candidate's dependencies.
+ // However, we have to be careful as the new path may make pub dependencies invalid.
+
+ // Triggering this case requires dependencies to be resolved in a specific order.
+ // Fuzzing found this unintuitive case, that triggers this unfortunate order of operations:
+ // 1. `D`'s dep on `B` is resolved
+ // 2. `D`'s dep on `C` is resolved
+ // 3. `B`'s dep on `A` is resolved with `0.0.0`
+ // 4. `C`'s dep on `B` triggering the optimization.
+ // So did we add `A 0.0.0` to the deps `C` can see?
+ // Or are we going to resolve `C`'s dep on `A` with `0.0.2`?
+ // Lets try it and see.
+ let reg = registry(vec![
+ pkg!(("A", "0.0.0")),
+ pkg!(("A", "0.0.2")),
+ pkg!("B" => [dep_req_kind("A", "=0.0.0", DepKind::Normal, true),]),
+ pkg!("C" => [dep("A"),dep("B")]),
+ pkg!("D" => [dep("B"),dep("C")]),
+ ]);
+ let res = resolve_and_validated(vec![dep("D")], &reg, None).unwrap();
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("D", "1.0.0"),
+ ("C", "1.0.0"),
+ ("B", "1.0.0"),
+ ("A", "0.0.0"),
+ ]),
+ );
+}
+
+#[test]
+fn public_dependency_skipping() {
+ // When backtracking due to a failed dependency, if Cargo is
+ // trying to be clever and skip irrelevant dependencies, care must
+ // be taken that the effects of pub deps are accounted for.
+ let input = vec![
+ pkg!(("a", "0.2.0")),
+ pkg!(("a", "2.0.0")),
+ pkg!(("b", "0.0.0") => [dep("bad")]),
+ pkg!(("b", "0.2.1") => [dep_req_kind("a", "0.2.0", DepKind::Normal, true)]),
+ pkg!("c" => [dep("a"),dep("b")]),
+ ];
+ let reg = registry(input);
+
+ resolve_and_validated(vec![dep("c")], &reg, None).unwrap();
+}
+
+#[test]
+fn public_dependency_skipping_in_backtracking() {
+ // When backtracking due to a failed dependency, if Cargo is
+ // trying to be clever and skip irrelevant dependencies, care must
+ // be taken that the effects of pub deps are accounted for.
+ let input = vec![
+ pkg!(("A", "0.0.0") => [dep("bad")]),
+ pkg!(("A", "0.0.1") => [dep("bad")]),
+ pkg!(("A", "0.0.2") => [dep("bad")]),
+ pkg!(("A", "0.0.3") => [dep("bad")]),
+ pkg!(("A", "0.0.4")),
+ pkg!(("A", "0.0.5")),
+ pkg!("B" => [dep_req_kind("A", ">= 0.0.3", DepKind::Normal, true)]),
+ pkg!("C" => [dep_req("A", "<= 0.0.4"), dep("B")]),
+ ];
+ let reg = registry(input);
+
+ resolve_and_validated(vec![dep("C")], &reg, None).unwrap();
+}
+
+#[test]
+fn public_sat_topological_order() {
+ let input = vec![
+ pkg!(("a", "0.0.1")),
+ pkg!(("a", "0.0.0")),
+ pkg!(("b", "0.0.1") => [dep_req_kind("a", "= 0.0.1", DepKind::Normal, true),]),
+ pkg!(("b", "0.0.0") => [dep("bad"),]),
+ pkg!("A" => [dep_req("a", "= 0.0.0"),dep_req_kind("b", "*", DepKind::Normal, true)]),
+ ];
+
+ let reg = registry(input);
+ assert!(resolve_and_validated(vec![dep("A")], &reg, None).is_err());
+}
+
+#[test]
+fn public_sat_unused_makes_things_pub() {
+ let input = vec![
+ pkg!(("a", "0.0.1")),
+ pkg!(("a", "0.0.0")),
+ pkg!(("b", "8.0.1") => [dep_req_kind("a", "= 0.0.1", DepKind::Normal, true),]),
+ pkg!(("b", "8.0.0") => [dep_req("a", "= 0.0.1"),]),
+ pkg!("c" => [dep_req("b", "= 8.0.0"),dep_req("a", "= 0.0.0"),]),
+ ];
+ let reg = registry(input);
+
+ resolve_and_validated(vec![dep("c")], &reg, None).unwrap();
+}
+
+#[test]
+fn public_sat_unused_makes_things_pub_2() {
+ let input = vec![
+ pkg!(("c", "0.0.2")),
+ pkg!(("c", "0.0.1")),
+ pkg!(("a-sys", "0.0.2")),
+ pkg!(("a-sys", "0.0.1") => [dep_req_kind("c", "= 0.0.1", DepKind::Normal, true),]),
+ pkg!("P" => [dep_req_kind("a-sys", "*", DepKind::Normal, true),dep_req("c", "= 0.0.1"),]),
+ pkg!("A" => [dep("P"),dep_req("c", "= 0.0.2"),]),
+ ];
+ let reg = registry(input);
+
+ resolve_and_validated(vec![dep("A")], &reg, None).unwrap();
+}
+
+#[test]
+#[should_panic(expected = "assertion failed: !name.is_empty()")]
+fn test_dependency_with_empty_name() {
+ // Bug 5229, dependency-names must not be empty
+ "".to_dep();
+}
+
+#[test]
+fn test_resolving_empty_dependency_list() {
+ let res = resolve(Vec::new(), &registry(vec![])).unwrap();
+
+ assert_eq!(res, names(&["root"]));
+}
+
+#[test]
+fn test_resolving_only_package() {
+ let reg = registry(vec![pkg!("foo")]);
+ let res = resolve(vec![dep("foo")], &reg).unwrap();
+ assert_same(&res, &names(&["root", "foo"]));
+}
+
+#[test]
+fn test_resolving_one_dep() {
+ let reg = registry(vec![pkg!("foo"), pkg!("bar")]);
+ let res = resolve(vec![dep("foo")], &reg).unwrap();
+ assert_same(&res, &names(&["root", "foo"]));
+}
+
+#[test]
+fn test_resolving_multiple_deps() {
+ let reg = registry(vec![pkg!("foo"), pkg!("bar"), pkg!("baz")]);
+ let res = resolve(vec![dep("foo"), dep("baz")], &reg).unwrap();
+ assert_same(&res, &names(&["root", "foo", "baz"]));
+}
+
+#[test]
+fn test_resolving_transitive_deps() {
+ let reg = registry(vec![pkg!("foo"), pkg!("bar" => ["foo"])]);
+ let res = resolve(vec![dep("bar")], &reg).unwrap();
+
+ assert_same(&res, &names(&["root", "foo", "bar"]));
+}
+
+#[test]
+fn test_resolving_common_transitive_deps() {
+ let reg = registry(vec![pkg!("foo" => ["bar"]), pkg!("bar")]);
+ let res = resolve(vec![dep("foo"), dep("bar")], &reg).unwrap();
+
+ assert_same(&res, &names(&["root", "foo", "bar"]));
+}
+
+#[test]
+fn test_resolving_with_same_name() {
+ let list = vec![
+ pkg_loc("foo", "https://first.example.com"),
+ pkg_loc("bar", "https://second.example.com"),
+ ];
+
+ let reg = registry(list);
+ let res = resolve(
+ vec![
+ dep_loc("foo", "https://first.example.com"),
+ dep_loc("bar", "https://second.example.com"),
+ ],
+ &reg,
+ )
+ .unwrap();
+
+ let mut names = loc_names(&[
+ ("foo", "https://first.example.com"),
+ ("bar", "https://second.example.com"),
+ ]);
+
+ names.push(pkg_id("root"));
+ assert_same(&res, &names);
+}
+
+#[test]
+fn test_resolving_with_dev_deps() {
+ let reg = registry(vec![
+ pkg!("foo" => ["bar", dep_kind("baz", DepKind::Development)]),
+ pkg!("baz" => ["bat", dep_kind("bam", DepKind::Development)]),
+ pkg!("bar"),
+ pkg!("bat"),
+ ]);
+
+ let res = resolve(
+ vec![dep("foo"), dep_kind("baz", DepKind::Development)],
+ &reg,
+ )
+ .unwrap();
+
+ assert_same(&res, &names(&["root", "foo", "bar", "baz", "bat"]));
+}
+
+#[test]
+fn resolving_with_many_versions() {
+ let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]);
+
+ let res = resolve(vec![dep("foo")], &reg).unwrap();
+
+ assert_same(&res, &names(&[("root", "1.0.0"), ("foo", "1.0.2")]));
+}
+
+#[test]
+fn resolving_with_specific_version() {
+ let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]);
+
+ let res = resolve(vec![dep_req("foo", "=1.0.1")], &reg).unwrap();
+
+ assert_same(&res, &names(&[("root", "1.0.0"), ("foo", "1.0.1")]));
+}
+
+#[test]
+fn test_resolving_maximum_version_with_transitive_deps() {
+ let reg = registry(vec![
+ pkg!(("util", "1.2.2")),
+ pkg!(("util", "1.0.0")),
+ pkg!(("util", "1.1.1")),
+ pkg!("foo" => [dep_req("util", "1.0.0")]),
+ pkg!("bar" => [dep_req("util", ">=1.0.1")]),
+ ]);
+
+ let res = resolve(vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("util", "1.2.2"),
+ ]),
+ );
+ assert!(!res.contains(&("util", "1.0.1").to_pkgid()));
+ assert!(!res.contains(&("util", "1.1.1").to_pkgid()));
+}
+
+#[test]
+fn test_resolving_minimum_version_with_transitive_deps() {
+ let reg = registry(vec![
+ pkg!(("util", "1.2.2")),
+ pkg!(("util", "1.0.0")),
+ pkg!(("util", "1.1.1")),
+ pkg!("foo" => [dep_req("util", "1.0.0")]),
+ pkg!("bar" => [dep_req("util", ">=1.0.1")]),
+ ]);
+
+ let mut config = Config::default().unwrap();
+ // -Z minimal-versions
+ // When the minimal-versions config option is specified then the lowest
+ // possible version of a package should be selected. "util 1.0.0" can't be
+ // selected because of the requirements of "bar", so the minimum version
+ // must be 1.1.1.
+ config.nightly_features_allowed = true;
+ config
+ .configure(
+ 1,
+ false,
+ None,
+ false,
+ false,
+ false,
+ &None,
+ &["minimal-versions".to_string()],
+ &[],
+ )
+ .unwrap();
+
+ let res = resolve_with_config(
+ vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")],
+ &reg,
+ &config,
+ )
+ .unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("util", "1.1.1"),
+ ]),
+ );
+ assert!(!res.contains(&("util", "1.2.2").to_pkgid()));
+ assert!(!res.contains(&("util", "1.0.0").to_pkgid()));
+}
+
+#[test]
+fn resolving_incompat_versions() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.1")),
+ pkg!(("foo", "1.0.2")),
+ pkg!("bar" => [dep_req("foo", "=1.0.2")]),
+ ]);
+
+ assert!(resolve(vec![dep_req("foo", "=1.0.1"), dep("bar")], &reg).is_err());
+}
+
+#[test]
+fn resolving_wrong_case_from_registry() {
+ // In the future we may #5678 allow this to happen.
+ // For back compatibility reasons, we probably won't.
+ // But we may want to future-proof ourselves by understanding it.
+ // This test documents the current behavior.
+ let reg = registry(vec![pkg!(("foo", "1.0.0")), pkg!("bar" => ["Foo"])]);
+
+ assert!(resolve(vec![dep("bar")], &reg).is_err());
+}
+
+#[test]
+fn resolving_mis_hyphenated_from_registry() {
+ // In the future we may #2775 allow this to happen.
+ // For back compatibility reasons, we probably won't.
+ // But we may want to future-proof ourselves by understanding it.
+ // This test documents the current behavior.
+ let reg = registry(vec![pkg!(("fo-o", "1.0.0")), pkg!("bar" => ["fo_o"])]);
+
+ assert!(resolve(vec![dep("bar")], &reg).is_err());
+}
+
+#[test]
+fn resolving_backtrack() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.2") => [dep("bar")]),
+ pkg!(("foo", "1.0.1") => [dep("baz")]),
+ pkg!("bar" => [dep_req("foo", "=2.0.2")]),
+ pkg!("baz"),
+ ]);
+
+ let res = resolve(vec![dep_req("foo", "^1")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[("root", "1.0.0"), ("foo", "1.0.1"), ("baz", "1.0.0")]),
+ );
+}
+
+#[test]
+fn resolving_backtrack_features() {
+ // test for cargo/issues/4347
+ let mut bad = dep("bar");
+ bad.set_features(vec!["bad"]);
+
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.2") => [bad]),
+ pkg!(("foo", "1.0.1") => [dep("bar")]),
+ pkg!("bar"),
+ ]);
+
+ let res = resolve(vec![dep_req("foo", "^1")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[("root", "1.0.0"), ("foo", "1.0.1"), ("bar", "1.0.0")]),
+ );
+}
+
+#[test]
+fn resolving_allows_multiple_compatible_versions() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.0")),
+ pkg!(("foo", "2.0.0")),
+ pkg!(("foo", "0.1.0")),
+ pkg!(("foo", "0.2.0")),
+ pkg!("bar" => ["d1", "d2", "d3", "d4"]),
+ pkg!("d1" => [dep_req("foo", "1")]),
+ pkg!("d2" => [dep_req("foo", "2")]),
+ pkg!("d3" => [dep_req("foo", "0.1")]),
+ pkg!("d4" => [dep_req("foo", "0.2")]),
+ ]);
+
+ let res = resolve(vec![dep("bar")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("foo", "2.0.0"),
+ ("foo", "0.1.0"),
+ ("foo", "0.2.0"),
+ ("d1", "1.0.0"),
+ ("d2", "1.0.0"),
+ ("d3", "1.0.0"),
+ ("d4", "1.0.0"),
+ ("bar", "1.0.0"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_deep_backtracking() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
+ pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
+ dep_req("other", "1")]),
+ pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
+ pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
+ pkg!(("baz", "1.0.1")),
+ pkg!(("dep_req", "1.0.0")),
+ pkg!(("dep_req", "2.0.0")),
+ ]);
+
+ let res = resolve(vec![dep_req("foo", "1")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "2.0.0"),
+ ("baz", "1.0.1"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_sys_crates() {
+ // This is based on issues/4902
+ // With `l`, a normal library, we get 2 copies so everyone gets the newest compatible.
+ // But for `l-sys`, a library with a links attribute, we make sure there is only one.
+ let reg = registry(vec![
+ pkg!(("l-sys", "0.9.1")),
+ pkg!(("l-sys", "0.10.0")),
+ pkg!(("l", "0.9.1")),
+ pkg!(("l", "0.10.0")),
+ pkg!(("d", "1.0.0") => [dep_req("l-sys", ">=0.8.0, <=0.10.0"), dep_req("l", ">=0.8.0, <=0.10.0")]),
+ pkg!(("r", "1.0.0") => [dep_req("l-sys", "0.9"), dep_req("l", "0.9")]),
+ ]);
+
+ let res = resolve(vec![dep_req("d", "1"), dep_req("r", "1")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("d", "1.0.0"),
+ ("r", "1.0.0"),
+ ("l-sys", "0.9.1"),
+ ("l", "0.9.1"),
+ ("l", "0.10.0"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_constrained_sibling_backtrack_parent() {
+ // There is no point in considering all of the backtrack_trap{1,2}
+ // candidates since they can't change the result of failing to
+ // resolve 'constrained'. Cargo should (ideally) skip past them and resume
+ // resolution once the activation of the parent, 'bar', is rolled back.
+ // Note that the traps are slightly more constrained to make sure they
+ // get picked first.
+ let mut reglist = vec![
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "1.0"),
+ dep_req("constrained", "=1.0.0")]),
+ pkg!(("bar", "1.0.0") => [dep_req("backtrack_trap1", "1.0.2"),
+ dep_req("backtrack_trap2", "1.0.2"),
+ dep_req("constrained", "1.0.0")]),
+ pkg!(("constrained", "1.0.0")),
+ pkg!(("backtrack_trap1", "1.0.0")),
+ pkg!(("backtrack_trap2", "1.0.0")),
+ ];
+ // Bump this to make the test harder - it adds more versions of bar that will
+ // fail to resolve, and more versions of the traps to consider.
+ const NUM_BARS_AND_TRAPS: usize = 50; // minimum 2
+ for i in 1..NUM_BARS_AND_TRAPS {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(
+ pkg!(("bar", vsn.clone()) => [dep_req("backtrack_trap1", "1.0.2"),
+ dep_req("backtrack_trap2", "1.0.2"),
+ dep_req("constrained", "1.0.1")]),
+ );
+ reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
+ reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
+ reglist.push(pkg!(("constrained", vsn.clone())));
+ }
+ let reg = registry(reglist);
+
+ let res = resolve(vec![dep_req("foo", "1")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("constrained", "1.0.0"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_many_equivalent_backtracking() {
+ let mut reglist = Vec::new();
+
+ const DEPTH: usize = 200;
+ const BRANCHING_FACTOR: usize = 100;
+
+ // Each level depends on the next but the last level does not exist.
+ // Without caching we need to test every path to the last level O(BRANCHING_FACTOR ^ DEPTH)
+ // and this test will time out. With caching we need to discover that none of these
+ // can be activated O(BRANCHING_FACTOR * DEPTH)
+ for l in 0..DEPTH {
+ let name = format!("level{}", l);
+ let next = format!("level{}", l + 1);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())]));
+ }
+ }
+
+ let reg = registry(reglist.clone());
+
+ let res = resolve(vec![dep("level0")], &reg);
+
+ assert!(res.is_err());
+
+ // It is easy to write code that quickly returns an error.
+ // Lets make sure we can find a good answer if it is there.
+ reglist.push(pkg!(("level0", "1.0.0")));
+
+ let reg = registry(reglist.clone());
+
+ let res = resolve(vec![dep("level0")], &reg).unwrap();
+
+ assert_contains(&res, &names(&[("root", "1.0.0"), ("level0", "1.0.0")]));
+
+ // Make sure we have not special-cased having no candidates.
+ reglist.push(pkg!(("constrained", "1.1.0")));
+ reglist.push(pkg!(("constrained", "1.0.0")));
+ reglist.push(
+ pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep_req("constrained", "=1.0.0")]),
+ );
+
+ let reg = registry(reglist.clone());
+
+ let res = resolve(vec![dep("level0"), dep("constrained")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("level0", "1.0.0"),
+ ("constrained", "1.1.0"),
+ ]),
+ );
+
+ let reg = registry(reglist.clone());
+
+ let res = resolve(vec![dep_req("level0", "1.0.1"), dep("constrained")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ (format!("level{}", DEPTH).as_str(), "1.0.0"),
+ ("constrained", "1.0.0"),
+ ]),
+ );
+
+ let reg = registry(reglist);
+
+ let res = resolve(
+ vec![dep_req("level0", "1.0.1"), dep_req("constrained", "1.1.0")],
+ &reg,
+ );
+
+ assert!(res.is_err());
+}
+
+#[test]
+fn resolving_with_deep_traps() {
+ let mut reglist = Vec::new();
+
+ const DEPTH: usize = 200;
+ const BRANCHING_FACTOR: usize = 100;
+
+ // Each backtrack_trap depends on the next, and adds a backtrack frame.
+ // None of which is going to help with `bad`.
+ for l in 0..DEPTH {
+ let name = format!("backtrack_trap{}", l);
+ let next = format!("backtrack_trap{}", l + 1);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())]));
+ }
+ }
+ {
+ let name = format!("backtrack_trap{}", DEPTH);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str())));
+ }
+ }
+ {
+ // slightly less constrained to make sure `cloaking` gets picked last.
+ for i in 1..(BRANCHING_FACTOR + 10) {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("cloaking", vsn.as_str()) => [dep_req("bad", "1.0.1")]));
+ }
+ }
+
+ let reg = registry(reglist);
+
+ let res = resolve(vec![dep("backtrack_trap0"), dep("cloaking")], &reg);
+
+ assert!(res.is_err());
+}
+
+#[test]
+fn resolving_with_constrained_cousins_backtrack() {
+ let mut reglist = Vec::new();
+
+ const DEPTH: usize = 100;
+ const BRANCHING_FACTOR: usize = 50;
+
+ // Each backtrack_trap depends on the next.
+ // The last depends on a specific ver of constrained.
+ for l in 0..DEPTH {
+ let name = format!("backtrack_trap{}", l);
+ let next = format!("backtrack_trap{}", l + 1);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())]));
+ }
+ }
+ {
+ let name = format!("backtrack_trap{}", DEPTH);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(
+ pkg!((name.as_str(), vsn.as_str()) => [dep_req("constrained", ">=1.1.0, <=2.0.0")]),
+ );
+ }
+ }
+ {
+ // slightly less constrained to make sure `constrained` gets picked last.
+ for i in 0..(BRANCHING_FACTOR + 10) {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("constrained", vsn.as_str())));
+ }
+ reglist.push(pkg!(("constrained", "1.1.0")));
+ reglist.push(pkg!(("constrained", "2.0.0")));
+ reglist.push(pkg!(("constrained", "2.0.1")));
+ }
+ reglist.push(pkg!(("cloaking", "1.0.0") => [dep_req("constrained", "~1.0.0")]));
+
+ let reg = registry(reglist.clone());
+
+ // `backtrack_trap0 = "*"` is a lot of ways of saying `constrained = ">=1.1.0, <=2.0.0"`
+ // but `constrained= "2.0.1"` is already picked.
+ // Only then to try and solve `constrained= "~1.0.0"` which is incompatible.
+ let res = resolve(
+ vec![
+ dep("backtrack_trap0"),
+ dep_req("constrained", "2.0.1"),
+ dep("cloaking"),
+ ],
+ &reg,
+ );
+
+ assert!(res.is_err());
+
+ // Each level depends on the next but the last depends on incompatible deps.
+ // Let's make sure that we can cache that a dep has incompatible deps.
+ for l in 0..DEPTH {
+ let name = format!("level{}", l);
+ let next = format!("level{}", l + 1);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())]));
+ }
+ }
+ reglist.push(
+ pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep("backtrack_trap0"),
+ dep("cloaking")
+ ]),
+ );
+
+ let reg = registry(reglist);
+
+ let res = resolve(vec![dep("level0"), dep_req("constrained", "2.0.1")], &reg);
+
+ assert!(res.is_err());
+
+ let res = resolve(vec![dep("level0"), dep_req("constrained", "2.0.0")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[("constrained", "2.0.0"), ("cloaking", "1.0.0")]),
+ );
+}
+
+#[test]
+fn resolving_with_constrained_sibling_backtrack_activation() {
+ // It makes sense to resolve most-constrained deps first, but
+ // with that logic the backtrack traps here come between the two
+ // attempted resolutions of 'constrained'. When backtracking,
+ // cargo should skip past them and resume resolution once the
+ // number of activations for 'constrained' changes.
+ let mut reglist = vec![
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "=1.0.0"),
+ dep_req("backtrack_trap1", "1.0"),
+ dep_req("backtrack_trap2", "1.0"),
+ dep_req("constrained", "<=1.0.60")]),
+ pkg!(("bar", "1.0.0") => [dep_req("constrained", ">=1.0.60")]),
+ ];
+ // Bump these to make the test harder, but you'll also need to
+ // change the version constraints on `constrained` above. To correctly
+ // exercise Cargo, the relationship between the values is:
+ // NUM_CONSTRAINED - vsn < NUM_TRAPS < vsn
+ // to make sure the traps are resolved between `constrained`.
+ const NUM_TRAPS: usize = 45; // min 1
+ const NUM_CONSTRAINED: usize = 100; // min 1
+ for i in 0..NUM_TRAPS {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
+ reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
+ }
+ for i in 0..NUM_CONSTRAINED {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("constrained", vsn.clone())));
+ }
+ let reg = registry(reglist);
+
+ let res = resolve(vec![dep_req("foo", "1")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("constrained", "1.0.60"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_public_constrained_sibling() {
+    // It makes sense to resolve most-constrained deps first, but
+    // with that logic the backtrack traps here come between the two
+    // attempted resolutions of 'constrained'. When backtracking,
+    // cargo should skip past them and resume resolution once the
+    // number of activations for 'constrained' changes.
+    //
+    // NOTE(review): unlike the private-dep sibling test, `bar`'s dependency
+    // on `constrained` uses `dep_req_kind(.., true)` — the trailing `true`
+    // presumably marks the dependency public (confirm against the helper in
+    // the support lib). Also note the `constrained` versions below span
+    // *major* versions (`{i}.0.0`), which is what the bare `<=60` / `>=60`
+    // requirements select over.
+    let mut reglist = vec![
+        pkg!(("foo", "1.0.0") => [dep_req("bar", "=1.0.0"),
+            dep_req("backtrack_trap1", "1.0"),
+            dep_req("backtrack_trap2", "1.0"),
+            dep_req("constrained", "<=60")]),
+        pkg!(("bar", "1.0.0") => [dep_req_kind("constrained", ">=60", DepKind::Normal, true)]),
+    ];
+    // Bump these to make the test harder, but you'll also need to
+    // change the version constraints on `constrained` above. To correctly
+    // exercise Cargo, the relationship between the values is:
+    //   NUM_CONSTRAINED - vsn < NUM_TRAPS < vsn
+    // to make sure the traps are resolved between `constrained`.
+    const NUM_TRAPS: usize = 45; // min 1
+    const NUM_CONSTRAINED: usize = 100; // min 1
+    // Many versions of each trap crate, all satisfying foo's `1.0` reqs.
+    for i in 0..NUM_TRAPS {
+        let vsn = format!("1.0.{}", i);
+        reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
+        reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
+    }
+    // One `constrained` per major version; only `60.0.0` can satisfy both
+    // foo's `<=60` and bar's `>=60` at once.
+    for i in 0..NUM_CONSTRAINED {
+        let vsn = format!("{}.0.0", i);
+        reglist.push(pkg!(("constrained", vsn.clone())));
+    }
+    let reg = registry(reglist);
+
+    // Result intentionally discarded: `resolve_and_validated` performs the
+    // interesting consistency checks internally.
+    let _ = resolve_and_validated(vec![dep_req("foo", "1")], &reg, None);
+}
+
+#[test]
+fn resolving_with_constrained_sibling_transitive_dep_effects() {
+    // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken to not miss the transitive effects of alternatives. E.g.
+    // in the right-to-left resolution of the graph below, B may
+    // affect whether D is successfully resolved.
+    //
+    //      A
+    //    / | \
+    //   B  C  D
+    //   |  |
+    //   C  D
+    let reg = registry(vec![
+        // A itself needs a D in the `^1.0.100` range...
+        pkg!(("A", "1.0.0") => [dep_req("B", "1.0"),
+            dep_req("C", "1.0"),
+            dep_req("D", "1.0.100")]),
+        pkg!(("B", "1.0.0") => [dep_req("C", ">=1.0.0")]),
+        pkg!(("B", "1.0.1") => [dep_req("C", ">=1.0.1")]),
+        // ...but the newer C's cap D below 1.0.100, so only C 1.0.0 (and
+        // hence B 1.0.0) leaves A's D requirement satisfiable.
+        pkg!(("C", "1.0.0") => [dep_req("D", "1.0.0")]),
+        pkg!(("C", "1.0.1") => [dep_req("D", ">=1.0.1,<1.0.100")]),
+        pkg!(("C", "1.0.2") => [dep_req("D", ">=1.0.2,<1.0.100")]),
+        pkg!(("D", "1.0.0")),
+        pkg!(("D", "1.0.1")),
+        pkg!(("D", "1.0.2")),
+        pkg!(("D", "1.0.100")),
+        pkg!(("D", "1.0.101")),
+        pkg!(("D", "1.0.102")),
+        pkg!(("D", "1.0.103")),
+        pkg!(("D", "1.0.104")),
+        pkg!(("D", "1.0.105")),
+    ]);
+
+    let res = resolve(vec![dep_req("A", "1")], &reg).unwrap();
+
+    // B and C must both land on 1.0.0; D then maximizes to 1.0.105.
+    assert_same(
+        &res,
+        &names(&[
+            ("root", "1.0.0"),
+            ("A", "1.0.0"),
+            ("B", "1.0.0"),
+            ("C", "1.0.0"),
+            ("D", "1.0.105"),
+        ]),
+    );
+}
+
+#[test]
+fn incomplete_information_skipping() {
+    // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken to not miss the transitive effects of alternatives.
+    // Fuzzing discovered that for some reason cargo was skipping based
+    // on incomplete information in the following case:
+    // minimized bug found in:
+    // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+    let input = vec![
+        pkg!(("a", "1.0.0")),
+        pkg!(("a", "1.1.0")),
+        pkg!("b" => [dep("a")]),
+        pkg!(("c", "1.0.0")),
+        pkg!(("c", "1.1.0")),
+        pkg!("d" => [dep_req("c", "=1.0")]),
+        pkg!(("e", "1.0.0")),
+        pkg!(("e", "1.1.0") => [dep_req("c", "1.1")]),
+        pkg!("to_yank"),
+        // Only f 1.0.0 pulls in `to_yank`; f 1.1.0 does not.
+        pkg!(("f", "1.0.0") => [
+            dep("to_yank"),
+            dep("d"),
+        ]),
+        pkg!(("f", "1.1.0") => [dep("d")]),
+        pkg!("g" => [
+            dep("b"),
+            dep("e"),
+            dep("f"),
+        ]),
+    ];
+    let reg = registry(input.clone());
+
+    // First resolution succeeds and happens not to activate `to_yank`.
+    let res = resolve(vec![dep("g")], &reg).unwrap();
+    let package_to_yank = "to_yank".to_pkgid();
+    // this package is not used in the resolution.
+    assert!(!res.contains(&package_to_yank));
+    // so when we yank it (simulated by rebuilding the registry without it)
+    let new_reg = registry(
+        input
+            .iter()
+            .cloned()
+            .filter(|x| package_to_yank != x.package_id())
+            .collect(),
+    );
+    // Sanity check: exactly one package was removed.
+    assert_eq!(input.len(), new_reg.len() + 1);
+    // it should still build
+    assert!(resolve(vec![dep("g")], &new_reg).is_ok());
+}
+
+#[test]
+fn incomplete_information_skipping_2() {
+    // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken to not miss the transitive effects of alternatives.
+    // Fuzzing discovered that for some reason cargo was skipping based
+    // on incomplete information in the following case:
+    // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+    //
+    // `bad` never exists in this registry, so every candidate depending on
+    // it is a guaranteed dead end that forces backtracking.
+    let input = vec![
+        pkg!(("b", "3.8.10")),
+        pkg!(("b", "8.7.4")),
+        pkg!(("b", "9.4.6")),
+        pkg!(("c", "1.8.8")),
+        pkg!(("c", "10.2.5")),
+        pkg!(("d", "4.1.2") => [
+            dep_req("bad", "=6.10.9"),
+        ]),
+        pkg!(("d", "5.5.6")),
+        pkg!(("d", "5.6.10")),
+        pkg!(("to_yank", "8.0.1")),
+        pkg!(("to_yank", "8.8.1")),
+        pkg!(("e", "4.7.8") => [
+            dep_req("d", ">=5.5.6, <=5.6.10"),
+            dep_req("to_yank", "=8.0.1"),
+        ]),
+        pkg!(("e", "7.4.9") => [
+            dep_req("bad", "=4.7.5"),
+        ]),
+        pkg!("f" => [
+            dep_req("d", ">=4.1.2, <=5.5.6"),
+        ]),
+        pkg!("g" => [
+            dep("bad"),
+        ]),
+        pkg!(("h", "3.8.3") => [
+            dep("g"),
+        ]),
+        pkg!(("h", "6.8.3") => [
+            dep("f"),
+        ]),
+        // Only h 8.1.9 pulls in the version of `to_yank` we remove below.
+        pkg!(("h", "8.1.9") => [
+            dep_req("to_yank", "=8.8.1"),
+        ]),
+        pkg!("i" => [
+            dep("b"),
+            dep("c"),
+            dep("e"),
+            dep("h"),
+        ]),
+    ];
+    let reg = registry(input.clone());
+
+    // First resolution succeeds without activating `to_yank 8.8.1`.
+    let res = resolve(vec![dep("i")], &reg).unwrap();
+    let package_to_yank = ("to_yank", "8.8.1").to_pkgid();
+    // this package is not used in the resolution.
+    assert!(!res.contains(&package_to_yank));
+    // so when we yank it (simulated by rebuilding the registry without it)
+    let new_reg = registry(
+        input
+            .iter()
+            .cloned()
+            .filter(|x| package_to_yank != x.package_id())
+            .collect(),
+    );
+    // Sanity check: exactly one package was removed.
+    assert_eq!(input.len(), new_reg.len() + 1);
+    // it should still build
+    assert!(resolve(vec![dep("i")], &new_reg).is_ok());
+}
+
+#[test]
+fn incomplete_information_skipping_3() {
+    // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken to not miss the transitive effects of alternatives.
+    // Fuzzing discovered that for some reason cargo was skipping based
+    // on incomplete information in the following case:
+    // minimized bug found in:
+    // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+    let input = vec![
+        pkg! {("to_yank", "3.0.3")},
+        pkg! {("to_yank", "3.3.0")},
+        pkg! {("to_yank", "3.3.1")},
+        // Only a 3.3.0 requires the exact `to_yank` version removed below;
+        // a 3.3.2 can settle for <=3.3.0 instead.
+        pkg! {("a", "3.3.0") => [
+            dep_req("to_yank", "=3.0.3"),
+        ] },
+        pkg! {("a", "3.3.2") => [
+            dep_req("to_yank", "<=3.3.0"),
+        ] },
+        pkg! {("b", "0.1.3") => [
+            dep_req("a", "=3.3.0"),
+        ] },
+        pkg! {("b", "2.0.2") => [
+            dep_req("to_yank", "3.3.0"),
+            dep("a"),
+        ] },
+        pkg! {("b", "2.3.3") => [
+            dep_req("to_yank", "3.3.0"),
+            dep_req("a", "=3.3.0"),
+        ] },
+    ];
+    let reg = registry(input.clone());
+
+    // First resolution succeeds without activating `to_yank 3.0.3`.
+    let res = resolve(vec![dep("b")], &reg).unwrap();
+    let package_to_yank = ("to_yank", "3.0.3").to_pkgid();
+    // this package is not used in the resolution.
+    assert!(!res.contains(&package_to_yank));
+    // so when we yank it (simulated by rebuilding the registry without it)
+    let new_reg = registry(
+        input
+            .iter()
+            .cloned()
+            .filter(|x| package_to_yank != x.package_id())
+            .collect(),
+    );
+    // Sanity check: exactly one package was removed.
+    assert_eq!(input.len(), new_reg.len() + 1);
+    // it should still build
+    assert!(resolve(vec![dep("b")], &new_reg).is_ok());
+}
+
+#[test]
+fn resolving_but_no_exists() {
+    // Resolving against an empty registry must fail, and the error must
+    // name the missing package, where it was searched for, and who asked.
+    let empty_registry = registry(Vec::new());
+
+    let outcome = resolve(vec![dep_req("foo", "1")], &empty_registry);
+    assert!(outcome.is_err());
+
+    let expected = "no matching package named `foo` found\n\
+                    location searched: registry `https://example.com/`\n\
+                    required by package `root v1.0.0 (registry `https://example.com/`)`";
+    assert_eq!(outcome.unwrap_err().to_string(), expected);
+}
+
+#[test]
+fn resolving_cycle() {
+    // `foo` depends on itself. We only care that the resolver terminates
+    // on such a cycle; whatever it returns is deliberately discarded.
+    let self_referential = pkg!("foo" => ["foo"]);
+    let reg = registry(vec![self_referential]);
+    let _ = resolve(vec![dep_req("foo", "1")], &reg);
+}
+
+#[test]
+fn hard_equality() {
+    // `bar` accepts any `foo ^1.0.0`, but the root also pins `foo` with an
+    // exact `=1.0.0`; both constraints must converge on the same copy even
+    // though a newer `foo 1.0.1` is available.
+    let reg = registry(vec![
+        pkg!(("foo", "1.0.1")),
+        pkg!(("foo", "1.0.0")),
+        pkg!(("bar", "1.0.0") => [dep_req("foo", "1.0.0")]),
+    ]);
+
+    let root_deps = vec![dep_req("bar", "1"), dep_req("foo", "=1.0.0")];
+    let resolution = resolve(root_deps, &reg).unwrap();
+
+    assert_same(
+        &resolution,
+        &names(&[("root", "1.0.0"), ("foo", "1.0.0"), ("bar", "1.0.0")]),
+    );
+}
+
+#[test]
+fn large_conflict_cache() {
+    // Stress the conflict cache: every generated version of `last` conflicts
+    // with the root's requirements, either through a `*-sys` crate pinned to
+    // a version the root forbids, or through an incompatible plain release.
+    let mut input = vec![
+        pkg!(("last", "0.0.0") => [dep("bad")]), // just to make sure last is less constrained
+    ];
+    let mut root_deps = vec![dep("last")];
+    const NUM_VERSIONS: u8 = 20;
+    for name in 0..=NUM_VERSIONS {
+        // a large number of conflicts can easily be generated by a sys crate.
+        let sys_name = format!("{}-sys", (b'a' + name) as char);
+        // `in_len` doubles as a unique major version number for `last`.
+        let in_len = input.len();
+        // This `last` wants sys `=0.0.0`, but the root demands `>= 0.0.1`.
+        input.push(pkg!(("last", format!("{}.0.0", in_len)) => [dep_req(&sys_name, "=0.0.0")]));
+        root_deps.push(dep_req(&sys_name, ">= 0.0.1"));
+
+        // a large number of conflicts can also easily be generated by a major release version.
+        let plane_name = format!("{}", (b'a' + name) as char);
+        let in_len = input.len();
+        // This `last` wants `=1.0.0`, but the root demands `>= 1.0.1`.
+        input.push(pkg!(("last", format!("{}.0.0", in_len)) => [dep_req(&plane_name, "=1.0.0")]));
+        root_deps.push(dep_req(&plane_name, ">= 1.0.1"));
+
+        for i in 0..=NUM_VERSIONS {
+            input.push(pkg!((&sys_name, format!("{}.0.0", i))));
+            input.push(pkg!((&plane_name, format!("1.0.{}", i))));
+        }
+    }
+    let reg = registry(input);
+    // The outcome is irrelevant; the point is that resolution terminates in
+    // reasonable time despite the huge number of recorded conflicts.
+    let _ = resolve(root_deps, &reg);
+}
+
+#[test]
+fn off_by_one_bug() {
+    // Fuzz-minimized regression case (the name suggests an off-by-one in the
+    // resolver's backtracking/skipping logic — TODO: confirm which commit
+    // fixed it). The result is discarded; the test only checks that
+    // resolution plus the validation pass completes without tripping an
+    // internal assertion.
+    let input = vec![
+        pkg!(("A-sys", "0.0.1")),
+        pkg!(("A-sys", "0.0.4")),
+        pkg!(("A-sys", "0.0.6")),
+        pkg!(("A-sys", "0.0.7")),
+        pkg!(("NA", "0.0.0") => [dep_req("A-sys", "<= 0.0.5"),]),
+        pkg!(("NA", "0.0.1") => [dep_req("A-sys", ">= 0.0.6, <= 0.0.8"),]),
+        pkg!(("a", "0.0.1")),
+        pkg!(("a", "0.0.2")),
+        pkg!(("aa", "0.0.0") => [dep_req("A-sys", ">= 0.0.4, <= 0.0.6"),dep_req("NA", "<= 0.0.0"),]),
+        pkg!(("f", "0.0.3") => [dep("NA"),dep_req("a", "<= 0.0.2"),dep("aa"),]),
+    ];
+
+    let reg = registry(input);
+    let _ = resolve_and_validated(vec![dep("f")], &reg, None);
+}
+
+#[test]
+fn conflict_store_bug() {
+    // Fuzz-minimized regression case for the resolver's conflict store.
+    // `bad` never exists, so several candidates below are dead ends. The
+    // result is discarded; the test only checks that resolution plus the
+    // validation pass completes consistently.
+    let input = vec![
+        pkg!(("A", "0.0.3")),
+        pkg!(("A", "0.0.5")),
+        pkg!(("A", "0.0.9") => [dep("bad"),]),
+        pkg!(("A", "0.0.10") => [dep("bad"),]),
+        pkg!(("L-sys", "0.0.1") => [dep("bad"),]),
+        pkg!(("L-sys", "0.0.5")),
+        pkg!(("R", "0.0.4") => [
+            dep_req("L-sys", "= 0.0.5"),
+        ]),
+        pkg!(("R", "0.0.6")),
+        pkg!(("a-sys", "0.0.5")),
+        pkg!(("a-sys", "0.0.11")),
+        pkg!(("c", "0.0.12") => [
+            dep_req("R", ">= 0.0.3, <= 0.0.4"),
+        ]),
+        pkg!(("c", "0.0.13") => [
+            dep_req("a-sys", ">= 0.0.8, <= 0.0.11"),
+        ]),
+        pkg!(("c0", "0.0.6") => [
+            dep_req("L-sys", "<= 0.0.2"),
+        ]),
+        pkg!(("c0", "0.0.10") => [
+            dep_req("A", ">= 0.0.9, <= 0.0.10"),
+            dep_req("a-sys", "= 0.0.5"),
+        ]),
+        pkg!("j" => [
+            dep_req("A", ">= 0.0.3, <= 0.0.5"),
+            dep_req("R", ">=0.0.4, <= 0.0.6"),
+            dep_req("c", ">= 0.0.9"),
+            dep_req("c0", ">= 0.0.6"),
+        ]),
+    ];
+
+    let reg = registry(input);
+    let _ = resolve_and_validated(vec![dep("j")], &reg, None);
+}
+
+#[test]
+fn conflict_store_more_then_one_match() {
+    // Regression case where, judging by the name, more than one entry in the
+    // conflict store can match the same failed activation (NOTE(review):
+    // the fn name's "then" is a typo for "than", kept for upstream parity).
+    // `bad` never exists, so BA-sys 0.0.1/0.0.2 are dead ends; the result is
+    // discarded — only the internal validation matters.
+    let input = vec![
+        pkg!(("A", "0.0.0")),
+        pkg!(("A", "0.0.1")),
+        pkg!(("A-sys", "0.0.0")),
+        pkg!(("A-sys", "0.0.1")),
+        pkg!(("A-sys", "0.0.2")),
+        pkg!(("A-sys", "0.0.3")),
+        pkg!(("A-sys", "0.0.12")),
+        pkg!(("A-sys", "0.0.16")),
+        pkg!(("B-sys", "0.0.0")),
+        pkg!(("B-sys", "0.0.1")),
+        pkg!(("B-sys", "0.0.2") => [dep_req("A-sys", "= 0.0.12"),]),
+        pkg!(("BA-sys", "0.0.0") => [dep_req("A-sys","= 0.0.16"),]),
+        pkg!(("BA-sys", "0.0.1") => [dep("bad"),]),
+        pkg!(("BA-sys", "0.0.2") => [dep("bad"),]),
+        pkg!("nA" => [
+            dep("A"),
+            dep_req("A-sys", "<= 0.0.3"),
+            dep("B-sys"),
+            dep("BA-sys"),
+        ]),
+    ];
+    let reg = registry(input);
+    let _ = resolve_and_validated(vec![dep("nA")], &reg, None);
+}
+
+#[test]
+fn bad_lockfile_from_8249() {
+    // Regression case — presumably for rust-lang/cargo issue #8249 (a bad
+    // lockfile); confirm against the issue tracker. `foo` activates the
+    // `a-sys` feature of `b` while also depending on `a-sys 0.2.0` directly,
+    // even though `b` itself wants `a-sys 0.1`.
+    let input = vec![
+        pkg!(("a-sys", "0.2.0")),
+        pkg!(("a-sys", "0.1.0")),
+        pkg!(("b", "0.1.0") => [
+            dep_req("a-sys", "0.1"), // should be optional: true, but not needed for now
+        ]),
+        pkg!(("c", "1.0.0") => [
+            dep_req("b", "=0.1.0"),
+        ]),
+        pkg!("foo" => [
+            dep_req("a-sys", "=0.2.0"),
+            {
+                // Depend on b with its `a-sys` feature turned on.
+                let mut b = dep_req("b", "=0.1.0");
+                b.set_features(vec!["a-sys"]);
+                b
+            },
+            dep_req("c", "=1.0.0"),
+        ]),
+    ];
+    let reg = registry(input);
+    // Result discarded; `resolve_and_validated` performs the checks.
+    let _ = resolve_and_validated(vec![dep("foo")], &reg, None);
+}
+
+#[test]
+fn cyclic_good_error_message() {
+    // A -> C -> A is a cycle; B -> C merely joins it. The resolver must
+    // report the cycle with a readable chain of "which satisfies" lines.
+    let input = vec![
+        pkg!(("A", "0.0.0") => [dep("C")]),
+        pkg!(("B", "0.0.0") => [dep("C")]),
+        pkg!(("C", "0.0.0") => [dep("A")]),
+    ];
+    let reg = registry(input);
+    let error = resolve(vec![dep("A"), dep("B")], &reg).unwrap_err();
+    println!("{}", error);
+    // NOTE(review): the indentation inside this expected literal looks like
+    // it may have been mangled in transit — verify the leading whitespace of
+    // the "... which satisfies" lines against the actual resolver output.
+    assert_eq!("\
+cyclic package dependency: package `A v0.0.0 (registry `https://example.com/`)` depends on itself. Cycle:
+package `A v0.0.0 (registry `https://example.com/`)`
+ ... which satisfies dependency `A = \"*\"` of package `C v0.0.0 (registry `https://example.com/`)`
+ ... which satisfies dependency `C = \"*\"` of package `A v0.0.0 (registry `https://example.com/`)`\
+", error.to_string());
+}