summaryrefslogtreecommitdiffstats
path: root/crates/cargo-test-support/src
diff options
context:
space:
mode:
Diffstat (limited to '')
-rw-r--r--crates/cargo-test-support/src/compare.rs781
-rw-r--r--crates/cargo-test-support/src/containers.rs285
-rw-r--r--crates/cargo-test-support/src/cross_compile.rs264
-rw-r--r--crates/cargo-test-support/src/diff.rs174
-rw-r--r--crates/cargo-test-support/src/git.rs249
-rw-r--r--crates/cargo-test-support/src/install.rs29
-rw-r--r--crates/cargo-test-support/src/lib.rs1424
-rw-r--r--crates/cargo-test-support/src/paths.rs347
-rw-r--r--crates/cargo-test-support/src/publish.rs245
-rw-r--r--crates/cargo-test-support/src/registry.rs1581
-rw-r--r--crates/cargo-test-support/src/tools.rs108
11 files changed, 5487 insertions, 0 deletions
diff --git a/crates/cargo-test-support/src/compare.rs b/crates/cargo-test-support/src/compare.rs
new file mode 100644
index 0000000..da1d099
--- /dev/null
+++ b/crates/cargo-test-support/src/compare.rs
@@ -0,0 +1,781 @@
+//! Routines for comparing and diffing output.
+//!
+//! # Patterns
+//!
+//! Many of these functions support special markup to assist with comparing
+//! text that may vary or is otherwise uninteresting for the test at hand. The
+//! supported patterns are:
+//!
+//! - `[..]` is a wildcard that matches 0 or more characters on the same line
+//! (similar to `.*` in a regex). It is non-greedy.
+//! - `[EXE]` optionally adds `.exe` on Windows (empty string on other
+//! platforms).
+//! - `[ROOT]` is the path to the test directory's root.
+//! - `[CWD]` is the working directory of the process that was run.
+//! - There is a wide range of substitutions (such as `[COMPILING]` or
+//! `[WARNING]`) to match cargo's "status" output and allows you to ignore
+//! the alignment. See the source of `substitute_macros` for a complete list
+//! of substitutions.
+//! - `[DIRTY-MSVC]` (only when the line starts with it) will be replaced by
+//!   `[DIRTY]` when `cfg(target_env = "msvc")`; otherwise the whole line is ignored.
+//! Tests that work around [issue 7358](https://github.com/rust-lang/cargo/issues/7358)
+//! can use this to avoid duplicating the `with_stderr` call like:
+//! `if cfg!(target_env = "msvc") {e.with_stderr("...[DIRTY]...");} else {e.with_stderr("...");}`.
+//!
+//! # Normalization
+//!
+//! In addition to the patterns described above, the strings are normalized
+//! in such a way to avoid unwanted differences. The normalizations are:
+//!
+//! - Raw tab characters are converted to the string `<tab>`. This is helpful
+//! so that raw tabs do not need to be written in the expected string, and
+//! to avoid confusion of tabs vs spaces.
+//! - Backslashes are converted to forward slashes to deal with Windows paths.
+//! This helps so that all tests can be written assuming forward slashes.
+//! Other heuristics are applied to try to ensure Windows-style paths aren't
+//! a problem.
+//! - Carriage returns are removed, which can help when running on Windows.
+
+use crate::diff;
+use crate::paths;
+use anyhow::{bail, Context, Result};
+use serde_json::Value;
+use std::env;
+use std::fmt;
+use std::path::Path;
+use std::str;
+use url::Url;
+
+/// Default `snapbox` Assertions
+///
+/// # Snapshots
+///
+/// Updating of snapshots is controlled with the `SNAPSHOTS` environment variable:
+///
+/// - `skip`: do not run the tests
+/// - `ignore`: run the tests but ignore their failure
+/// - `verify`: run the tests
+/// - `overwrite`: update the snapshots based on the output of the tests
+///
+/// # Patterns
+///
+/// - `[..]` is a character wildcard, stopping at line breaks
+/// - `\n...\n` is a multi-line wildcard
+/// - `[EXE]` matches the exe suffix for the current platform
+/// - `[ROOT]` matches [`paths::root()`][crate::paths::root]
+/// - `[ROOTURL]` matches [`paths::root()`][crate::paths::root] as a URL
+///
+/// # Normalization
+///
+/// In addition to the patterns described above, text is normalized
+/// in such a way to avoid unwanted differences. The normalizations are:
+///
+/// - Backslashes are converted to forward slashes to deal with Windows paths.
+/// This helps so that all tests can be written assuming forward slashes.
+/// Other heuristics are applied to try to ensure Windows-style paths aren't
+/// a problem.
+/// - Carriage returns are removed, which can help when running on Windows.
+pub fn assert_ui() -> snapbox::Assert {
+ let root = paths::root();
+    // Use `from_file_path` instead of `from_directory_path` so the trailing slash is
+    // put in the user's output, rather than hidden in the variable
+ let root_url = url::Url::from_file_path(&root).unwrap().to_string();
+ let root = root.display().to_string();
+
+ let mut subs = snapbox::Substitutions::new();
+ subs.extend([
+ (
+ "[EXE]",
+ std::borrow::Cow::Borrowed(std::env::consts::EXE_SUFFIX),
+ ),
+ ("[ROOT]", std::borrow::Cow::Owned(root)),
+ ("[ROOTURL]", std::borrow::Cow::Owned(root_url)),
+ ])
+ .unwrap();
+ snapbox::Assert::new()
+ .action_env(snapbox::DEFAULT_ACTION_ENV)
+ .substitutions(subs)
+}
+
+/// Normalizes the output so that it can be compared against the expected value.
+fn normalize_actual(actual: &str, cwd: Option<&Path>) -> String {
+ // It's easier to read tabs in outputs if they don't show up as literal
+ // hidden characters
+ let actual = actual.replace('\t', "<tab>");
+ if cfg!(windows) {
+ // Let's not deal with \r\n vs \n on windows...
+ let actual = actual.replace('\r', "");
+ normalize_windows(&actual, cwd)
+ } else {
+ actual
+ }
+}
+
+/// Normalizes the expected string so that it can be compared against the actual output.
+fn normalize_expected(expected: &str, cwd: Option<&Path>) -> String {
+ let expected = replace_dirty_msvc(expected);
+ let expected = substitute_macros(&expected);
+
+ if cfg!(windows) {
+ normalize_windows(&expected, cwd)
+ } else {
+ let expected = match cwd {
+ None => expected,
+ Some(cwd) => expected.replace("[CWD]", &cwd.display().to_string()),
+ };
+ let expected = expected.replace("[ROOT]", &paths::root().display().to_string());
+ expected
+ }
+}
+
+fn replace_dirty_msvc_impl(s: &str, is_msvc: bool) -> String {
+ if is_msvc {
+ s.replace("[DIRTY-MSVC]", "[DIRTY]")
+ } else {
+ use itertools::Itertools;
+
+ let mut new = s
+ .lines()
+ .filter(|it| !it.starts_with("[DIRTY-MSVC]"))
+ .join("\n");
+
+ if s.ends_with("\n") {
+ new.push_str("\n");
+ }
+
+ new
+ }
+}
+
+fn replace_dirty_msvc(s: &str) -> String {
+ replace_dirty_msvc_impl(s, cfg!(target_env = "msvc"))
+}
+
+/// Normalizes text for both actual and expected strings on Windows.
+fn normalize_windows(text: &str, cwd: Option<&Path>) -> String {
+ // Let's not deal with / vs \ (windows...)
+ let text = text.replace('\\', "/");
+
+ // Weirdness for paths on Windows extends beyond `/` vs `\` apparently.
+ // Namely paths like `c:\` and `C:\` are equivalent and that can cause
+ // issues. The return value of `env::current_dir()` may return a
+ // lowercase drive name, but we round-trip a lot of values through `Url`
+ // which will auto-uppercase the drive name. To just ignore this
+ // distinction we try to canonicalize as much as possible, taking all
+ // forms of a path and canonicalizing them to one.
+ let replace_path = |s: &str, path: &Path, with: &str| {
+ let path_through_url = Url::from_file_path(path).unwrap().to_file_path().unwrap();
+ let path1 = path.display().to_string().replace('\\', "/");
+ let path2 = path_through_url.display().to_string().replace('\\', "/");
+ s.replace(&path1, with)
+ .replace(&path2, with)
+ .replace(with, &path1)
+ };
+
+ let text = match cwd {
+ None => text,
+ Some(p) => replace_path(&text, p, "[CWD]"),
+ };
+
+ // Similar to cwd above, perform similar treatment to the root path
+ // which in theory all of our paths should otherwise get rooted at.
+ let root = paths::root();
+ let text = replace_path(&text, &root, "[ROOT]");
+
+ text
+}
+
+fn substitute_macros(input: &str) -> String {
+ let macros = [
+ ("[RUNNING]", " Running"),
+ ("[COMPILING]", " Compiling"),
+ ("[CHECKING]", " Checking"),
+ ("[COMPLETED]", " Completed"),
+ ("[CREATED]", " Created"),
+ ("[FINISHED]", " Finished"),
+ ("[ERROR]", "error:"),
+ ("[WARNING]", "warning:"),
+ ("[NOTE]", "note:"),
+ ("[HELP]", "help:"),
+ ("[DOCUMENTING]", " Documenting"),
+ ("[SCRAPING]", " Scraping"),
+ ("[FRESH]", " Fresh"),
+ ("[DIRTY]", " Dirty"),
+ ("[UPDATING]", " Updating"),
+ ("[ADDING]", " Adding"),
+ ("[REMOVING]", " Removing"),
+ ("[DOCTEST]", " Doc-tests"),
+ ("[PACKAGING]", " Packaging"),
+ ("[PACKAGED]", " Packaged"),
+ ("[DOWNLOADING]", " Downloading"),
+ ("[DOWNLOADED]", " Downloaded"),
+ ("[UPLOADING]", " Uploading"),
+ ("[VERIFYING]", " Verifying"),
+ ("[ARCHIVING]", " Archiving"),
+ ("[INSTALLING]", " Installing"),
+ ("[REPLACING]", " Replacing"),
+ ("[UNPACKING]", " Unpacking"),
+ ("[SUMMARY]", " Summary"),
+ ("[FIXED]", " Fixed"),
+ ("[FIXING]", " Fixing"),
+ ("[EXE]", env::consts::EXE_SUFFIX),
+ ("[IGNORED]", " Ignored"),
+ ("[INSTALLED]", " Installed"),
+ ("[REPLACED]", " Replaced"),
+ ("[BUILDING]", " Building"),
+ ("[LOGIN]", " Login"),
+ ("[LOGOUT]", " Logout"),
+ ("[YANK]", " Yank"),
+ ("[OWNER]", " Owner"),
+ ("[MIGRATING]", " Migrating"),
+ ("[EXECUTABLE]", " Executable"),
+ ("[SKIPPING]", " Skipping"),
+ ("[WAITING]", " Waiting"),
+ ];
+ let mut result = input.to_owned();
+ for &(pat, subst) in &macros {
+ result = result.replace(pat, subst)
+ }
+ result
+}
+
+/// Compares one string against another, checking that they both match.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+///
+/// - `description` explains where the output is from (usually "stdout" or "stderr").
+/// - `other_output` is other output to display in the error (usually stdout or stderr).
+pub fn match_exact(
+ expected: &str,
+ actual: &str,
+ description: &str,
+ other_output: &str,
+ cwd: Option<&Path>,
+) -> Result<()> {
+ let expected = normalize_expected(expected, cwd);
+ let actual = normalize_actual(actual, cwd);
+ let e: Vec<_> = expected.lines().map(WildStr::new).collect();
+ let a: Vec<_> = actual.lines().map(WildStr::new).collect();
+ if e == a {
+ return Ok(());
+ }
+ let diff = diff::colored_diff(&e, &a);
+ bail!(
+ "{} did not match:\n\
+ {}\n\n\
+ other output:\n\
+ {}\n",
+ description,
+ diff,
+ other_output,
+ );
+}
+
+/// Convenience wrapper around [`match_exact`] which will panic on error.
+#[track_caller]
+pub fn assert_match_exact(expected: &str, actual: &str) {
+ if let Err(e) = match_exact(expected, actual, "", "", None) {
+ crate::panic_error("", e);
+ }
+}
+
+/// Checks that the given string contains the given lines, ignoring the order
+/// of the lines.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+pub fn match_unordered(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> {
+ let expected = normalize_expected(expected, cwd);
+ let actual = normalize_actual(actual, cwd);
+ let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect();
+ let mut a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect();
+ // match more-constrained lines first, although in theory we'll
+ // need some sort of recursive match here. This handles the case
+    // that you expect "a\n[..]b" and the two lines "ab" and "a" are
+    // printed out, where technically we do match unordered but a naive
+ // search fails to find this. This simple sort at least gets the
+ // test suite to pass for now, but we may need to get more fancy
+ // if tests start failing again.
+ a.sort_by_key(|s| s.line.len());
+ let mut changes = Vec::new();
+ let mut a_index = 0;
+ let mut failure = false;
+
+ use crate::diff::Change;
+ for (e_i, e_line) in e.into_iter().enumerate() {
+ match a.iter().position(|a_line| e_line == *a_line) {
+ Some(index) => {
+ let a_line = a.remove(index);
+ changes.push(Change::Keep(e_i, index, a_line));
+ a_index += 1;
+ }
+ None => {
+ failure = true;
+ changes.push(Change::Remove(e_i, e_line));
+ }
+ }
+ }
+ for unmatched in a {
+ failure = true;
+ changes.push(Change::Add(a_index, unmatched));
+ a_index += 1;
+ }
+ if failure {
+ bail!(
+ "Expected lines did not match (ignoring order):\n{}\n",
+ diff::render_colored_changes(&changes)
+ );
+ } else {
+ Ok(())
+ }
+}
+
+/// Checks that the given string contains the given contiguous lines
+/// somewhere.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+pub fn match_contains(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> {
+ let expected = normalize_expected(expected, cwd);
+ let actual = normalize_actual(actual, cwd);
+ let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect();
+ let a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect();
+ if e.len() == 0 {
+ bail!("expected length must not be zero");
+ }
+ for window in a.windows(e.len()) {
+ if window == e {
+ return Ok(());
+ }
+ }
+ bail!(
+ "expected to find:\n\
+ {}\n\n\
+ did not find in output:\n\
+ {}",
+ expected,
+ actual
+ );
+}
+
+/// Checks that the given string does not contain the given contiguous lines
+/// anywhere.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+pub fn match_does_not_contain(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> {
+ if match_contains(expected, actual, cwd).is_ok() {
+ bail!(
+ "expected not to find:\n\
+ {}\n\n\
+ but found in output:\n\
+ {}",
+ expected,
+ actual
+ );
+ } else {
+ Ok(())
+ }
+}
+
+/// Checks that the given string contains the given contiguous lines
+/// somewhere, and should be repeated `number` times.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+pub fn match_contains_n(
+ expected: &str,
+ number: usize,
+ actual: &str,
+ cwd: Option<&Path>,
+) -> Result<()> {
+ let expected = normalize_expected(expected, cwd);
+ let actual = normalize_actual(actual, cwd);
+ let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect();
+ let a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect();
+ if e.len() == 0 {
+ bail!("expected length must not be zero");
+ }
+ let matches = a.windows(e.len()).filter(|window| *window == e).count();
+ if matches == number {
+ Ok(())
+ } else {
+ bail!(
+ "expected to find {} occurrences of:\n\
+ {}\n\n\
+ but found {} matches in the output:\n\
+ {}",
+ number,
+ expected,
+ matches,
+ actual
+ )
+ }
+}
+
+/// Checks that the given string has a line that contains the given patterns,
+/// and that line also does not contain the `without` patterns.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+///
+/// See [`crate::Execs::with_stderr_line_without`] for an example and cautions
+/// against using.
+pub fn match_with_without(
+ actual: &str,
+ with: &[String],
+ without: &[String],
+ cwd: Option<&Path>,
+) -> Result<()> {
+ let actual = normalize_actual(actual, cwd);
+ let norm = |s: &String| format!("[..]{}[..]", normalize_expected(s, cwd));
+ let with: Vec<_> = with.iter().map(norm).collect();
+ let without: Vec<_> = without.iter().map(norm).collect();
+ let with_wild: Vec<_> = with.iter().map(|w| WildStr::new(w)).collect();
+ let without_wild: Vec<_> = without.iter().map(|w| WildStr::new(w)).collect();
+
+ let matches: Vec<_> = actual
+ .lines()
+ .map(WildStr::new)
+ .filter(|line| with_wild.iter().all(|with| with == line))
+ .filter(|line| !without_wild.iter().any(|without| without == line))
+ .collect();
+ match matches.len() {
+ 0 => bail!(
+ "Could not find expected line in output.\n\
+ With contents: {:?}\n\
+ Without contents: {:?}\n\
+ Actual stderr:\n\
+ {}\n",
+ with,
+ without,
+ actual
+ ),
+ 1 => Ok(()),
+ _ => bail!(
+ "Found multiple matching lines, but only expected one.\n\
+ With contents: {:?}\n\
+ Without contents: {:?}\n\
+ Matching lines:\n\
+ {}\n",
+ with,
+ without,
+ itertools::join(matches, "\n")
+ ),
+ }
+}
+
+/// Checks that the given string of JSON objects match the given set of
+/// expected JSON objects.
+///
+/// See [`crate::Execs::with_json`] for more details.
+pub fn match_json(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> {
+ let (exp_objs, act_objs) = collect_json_objects(expected, actual)?;
+ if exp_objs.len() != act_objs.len() {
+ bail!(
+ "expected {} json lines, got {}, stdout:\n{}",
+ exp_objs.len(),
+ act_objs.len(),
+ actual
+ );
+ }
+ for (exp_obj, act_obj) in exp_objs.iter().zip(act_objs) {
+ find_json_mismatch(exp_obj, &act_obj, cwd)?;
+ }
+ Ok(())
+}
+
+/// Checks that the given string of JSON objects match the given set of
+/// expected JSON objects, ignoring their order.
+///
+/// See [`crate::Execs::with_json_contains_unordered`] for more details and
+/// cautions when using.
+pub fn match_json_contains_unordered(
+ expected: &str,
+ actual: &str,
+ cwd: Option<&Path>,
+) -> Result<()> {
+ let (exp_objs, mut act_objs) = collect_json_objects(expected, actual)?;
+ for exp_obj in exp_objs {
+ match act_objs
+ .iter()
+ .position(|act_obj| find_json_mismatch(&exp_obj, act_obj, cwd).is_ok())
+ {
+ Some(index) => act_objs.remove(index),
+ None => {
+ bail!(
+ "Did not find expected JSON:\n\
+ {}\n\
+ Remaining available output:\n\
+ {}\n",
+ serde_json::to_string_pretty(&exp_obj).unwrap(),
+ itertools::join(
+ act_objs.iter().map(|o| serde_json::to_string(o).unwrap()),
+ "\n"
+ )
+ );
+ }
+ };
+ }
+ Ok(())
+}
+
+fn collect_json_objects(
+ expected: &str,
+ actual: &str,
+) -> Result<(Vec<serde_json::Value>, Vec<serde_json::Value>)> {
+ let expected_objs: Vec<_> = expected
+ .split("\n\n")
+ .map(|expect| {
+ expect
+ .parse()
+ .with_context(|| format!("failed to parse expected JSON object:\n{}", expect))
+ })
+ .collect::<Result<_>>()?;
+ let actual_objs: Vec<_> = actual
+ .lines()
+ .filter(|line| line.starts_with('{'))
+ .map(|line| {
+ line.parse()
+ .with_context(|| format!("failed to parse JSON object:\n{}", line))
+ })
+ .collect::<Result<_>>()?;
+ Ok((expected_objs, actual_objs))
+}
+
+/// Compares JSON object for approximate equality.
+/// You can use `[..]` wildcard in strings (useful for OS-dependent things such
+/// as paths). You can use a `"{...}"` string literal as a wildcard for
+/// arbitrary nested JSON (useful for parts of object emitted by other programs
+/// (e.g., rustc) rather than Cargo itself).
+pub fn find_json_mismatch(expected: &Value, actual: &Value, cwd: Option<&Path>) -> Result<()> {
+ match find_json_mismatch_r(expected, actual, cwd) {
+ Some((expected_part, actual_part)) => bail!(
+ "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n",
+ serde_json::to_string_pretty(expected).unwrap(),
+ serde_json::to_string_pretty(&actual).unwrap(),
+ serde_json::to_string_pretty(expected_part).unwrap(),
+ serde_json::to_string_pretty(actual_part).unwrap(),
+ ),
+ None => Ok(()),
+ }
+}
+
+fn find_json_mismatch_r<'a>(
+ expected: &'a Value,
+ actual: &'a Value,
+ cwd: Option<&Path>,
+) -> Option<(&'a Value, &'a Value)> {
+ use serde_json::Value::*;
+ match (expected, actual) {
+ (&Number(ref l), &Number(ref r)) if l == r => None,
+ (&Bool(l), &Bool(r)) if l == r => None,
+ (&String(ref l), _) if l == "{...}" => None,
+ (&String(ref l), &String(ref r)) => {
+ if match_exact(l, r, "", "", cwd).is_err() {
+ Some((expected, actual))
+ } else {
+ None
+ }
+ }
+ (&Array(ref l), &Array(ref r)) => {
+ if l.len() != r.len() {
+ return Some((expected, actual));
+ }
+
+ l.iter()
+ .zip(r.iter())
+ .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd))
+ .next()
+ }
+ (&Object(ref l), &Object(ref r)) => {
+ let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k));
+ if !same_keys {
+ return Some((expected, actual));
+ }
+
+ l.values()
+ .zip(r.values())
+ .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd))
+ .next()
+ }
+ (&Null, &Null) => None,
+        // Type mismatch; the magic `"{...}"` wildcard for arbitrary sub-JSON is handled in the String arm above.
+ _ => Some((expected, actual)),
+ }
+}
+
+/// A single line string that supports `[..]` wildcard matching.
+pub struct WildStr<'a> {
+ has_meta: bool,
+ line: &'a str,
+}
+
+impl<'a> WildStr<'a> {
+ pub fn new(line: &'a str) -> WildStr<'a> {
+ WildStr {
+ has_meta: line.contains("[..]"),
+ line,
+ }
+ }
+}
+
+impl<'a> PartialEq for WildStr<'a> {
+ fn eq(&self, other: &Self) -> bool {
+ match (self.has_meta, other.has_meta) {
+ (false, false) => self.line == other.line,
+ (true, false) => meta_cmp(self.line, other.line),
+ (false, true) => meta_cmp(other.line, self.line),
+ (true, true) => panic!("both lines cannot have [..]"),
+ }
+ }
+}
+
+fn meta_cmp(a: &str, mut b: &str) -> bool {
+ for (i, part) in a.split("[..]").enumerate() {
+ match b.find(part) {
+ Some(j) => {
+ if i == 0 && j != 0 {
+ return false;
+ }
+ b = &b[j + part.len()..];
+ }
+ None => return false,
+ }
+ }
+ b.is_empty() || a.ends_with("[..]")
+}
+
+impl fmt::Display for WildStr<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.line)
+ }
+}
+
+impl fmt::Debug for WildStr<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{:?}", self.line)
+ }
+}
+
+#[test]
+fn wild_str_cmp() {
+ for (a, b) in &[
+ ("a b", "a b"),
+ ("a[..]b", "a b"),
+ ("a[..]", "a b"),
+ ("[..]", "a b"),
+ ("[..]b", "a b"),
+ ] {
+ assert_eq!(WildStr::new(a), WildStr::new(b));
+ }
+ for (a, b) in &[("[..]b", "c"), ("b", "c"), ("b", "cb")] {
+ assert_ne!(WildStr::new(a), WildStr::new(b));
+ }
+}
+
+#[test]
+fn dirty_msvc() {
+ let case = |expected: &str, wild: &str, msvc: bool| {
+ assert_eq!(expected, &replace_dirty_msvc_impl(wild, msvc));
+ };
+
+ // no replacements
+ case("aa", "aa", false);
+ case("aa", "aa", true);
+
+ // with replacements
+ case(
+ "\
+[DIRTY] a",
+ "\
+[DIRTY-MSVC] a",
+ true,
+ );
+ case(
+ "",
+ "\
+[DIRTY-MSVC] a",
+ false,
+ );
+ case(
+ "\
+[DIRTY] a
+[COMPILING] a",
+ "\
+[DIRTY-MSVC] a
+[COMPILING] a",
+ true,
+ );
+ case(
+ "\
+[COMPILING] a",
+ "\
+[DIRTY-MSVC] a
+[COMPILING] a",
+ false,
+ );
+
+ // test trailing newline behavior
+ case(
+ "\
+A
+B
+", "\
+A
+B
+", true,
+ );
+
+ case(
+ "\
+A
+B
+", "\
+A
+B
+", false,
+ );
+
+ case(
+ "\
+A
+B", "\
+A
+B", true,
+ );
+
+ case(
+ "\
+A
+B", "\
+A
+B", false,
+ );
+
+ case(
+ "\
+[DIRTY] a
+",
+ "\
+[DIRTY-MSVC] a
+",
+ true,
+ );
+ case(
+ "\n",
+ "\
+[DIRTY-MSVC] a
+",
+ false,
+ );
+
+ case(
+ "\
+[DIRTY] a",
+ "\
+[DIRTY-MSVC] a",
+ true,
+ );
+ case(
+ "",
+ "\
+[DIRTY-MSVC] a",
+ false,
+ );
+}
diff --git a/crates/cargo-test-support/src/containers.rs b/crates/cargo-test-support/src/containers.rs
new file mode 100644
index 0000000..17040d8
--- /dev/null
+++ b/crates/cargo-test-support/src/containers.rs
@@ -0,0 +1,285 @@
+//! Support for testing using Docker containers.
+//!
+//! The [`Container`] type is a builder for configuring a container to run.
+//! After you call `launch`, you can use the [`ContainerHandle`] to interact
+//! with the running container.
+//!
+//! Tests using containers must use `#[cargo_test(container_test)]` to disable
+//! them unless the CARGO_CONTAINER_TESTS environment variable is set.
+
+use cargo_util::ProcessBuilder;
+use std::collections::HashMap;
+use std::io::Read;
+use std::path::PathBuf;
+use std::process::Command;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Mutex;
+use tar::Header;
+
+/// A builder for configuring a container to run.
+pub struct Container {
+ /// The host directory that forms the basis of the Docker image.
+ build_context: PathBuf,
+ /// Files to copy over to the image.
+ files: Vec<MkFile>,
+}
+
+/// A handle to a running container.
+///
+/// You can use this to interact with the container.
+pub struct ContainerHandle {
+ /// The name of the container.
+ name: String,
+ /// The IP address of the container.
+ ///
+ /// NOTE: This is currently unused, but may be useful so I left it in.
+    /// This can only be used on Linux. Docker on macOS and Windows doesn't
+    /// allow direct connections to the container.
+ pub ip_address: String,
+ /// Port mappings of container_port to host_port for ports exposed via EXPOSE.
+ pub port_mappings: HashMap<u16, u16>,
+}
+
+impl Container {
+ pub fn new(context_dir: &str) -> Container {
+ assert!(std::env::var_os("CARGO_CONTAINER_TESTS").is_some());
+ let mut build_context = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+ build_context.push("containers");
+ build_context.push(context_dir);
+ Container {
+ build_context,
+ files: Vec::new(),
+ }
+ }
+
+ /// Adds a file to be copied into the container.
+ pub fn file(mut self, file: MkFile) -> Self {
+ self.files.push(file);
+ self
+ }
+
+ /// Starts the container.
+ pub fn launch(mut self) -> ContainerHandle {
+ static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
+
+ let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
+ let name = format!("cargo_test_{id}");
+ remove_if_exists(&name);
+ self.create_container(&name);
+ self.copy_files(&name);
+ self.start_container(&name);
+ let info = self.container_inspect(&name);
+ let ip_address = if cfg!(target_os = "linux") {
+ info[0]["NetworkSettings"]["IPAddress"]
+ .as_str()
+ .unwrap()
+ .to_string()
+ } else {
+ // macOS and Windows can't make direct connections to the
+ // container. It only works through exposed ports or mapped ports.
+ "127.0.0.1".to_string()
+ };
+ let port_mappings = self.port_mappings(&info);
+ self.wait_till_ready(&port_mappings);
+
+ ContainerHandle {
+ name,
+ ip_address,
+ port_mappings,
+ }
+ }
+
+ fn create_container(&self, name: &str) {
+ static BUILD_LOCK: Mutex<()> = Mutex::new(());
+
+ let image_base = self.build_context.file_name().unwrap();
+ let image_name = format!("cargo-test-{}", image_base.to_str().unwrap());
+ let _lock = BUILD_LOCK.lock().unwrap();
+ ProcessBuilder::new("docker")
+ .args(&["build", "--tag", image_name.as_str()])
+ .arg(&self.build_context)
+ .exec_with_output()
+ .unwrap();
+
+ ProcessBuilder::new("docker")
+ .args(&[
+ "container",
+ "create",
+ "--publish-all",
+ "--rm",
+ "--name",
+ name,
+ ])
+ .arg(image_name)
+ .exec_with_output()
+ .unwrap();
+ }
+
+ fn copy_files(&mut self, name: &str) {
+ if self.files.is_empty() {
+ return;
+ }
+ let mut ar = tar::Builder::new(Vec::new());
+ let files = std::mem::replace(&mut self.files, Vec::new());
+ for mut file in files {
+ ar.append_data(&mut file.header, &file.path, file.contents.as_slice())
+ .unwrap();
+ }
+ let ar = ar.into_inner().unwrap();
+ ProcessBuilder::new("docker")
+ .args(&["cp", "-"])
+ .arg(format!("{name}:/"))
+ .stdin(ar)
+ .exec_with_output()
+ .unwrap();
+ }
+
+ fn start_container(&self, name: &str) {
+ ProcessBuilder::new("docker")
+ .args(&["container", "start"])
+ .arg(name)
+ .exec_with_output()
+ .unwrap();
+ }
+
+ fn container_inspect(&self, name: &str) -> serde_json::Value {
+ let output = ProcessBuilder::new("docker")
+ .args(&["inspect", name])
+ .exec_with_output()
+ .unwrap();
+ serde_json::from_slice(&output.stdout).unwrap()
+ }
+
+ /// Returns the mapping of container_port->host_port for ports that were
+ /// exposed with EXPOSE.
+ fn port_mappings(&self, info: &serde_json::Value) -> HashMap<u16, u16> {
+ info[0]["NetworkSettings"]["Ports"]
+ .as_object()
+ .unwrap()
+ .iter()
+ .map(|(key, value)| {
+ let key = key
+ .strip_suffix("/tcp")
+ .expect("expected TCP only ports")
+ .parse()
+ .unwrap();
+ let values = value.as_array().unwrap();
+ let value = values
+ .iter()
+ .find(|value| value["HostIp"].as_str().unwrap() == "0.0.0.0")
+ .expect("expected localhost IP");
+ let host_port = value["HostPort"].as_str().unwrap().parse().unwrap();
+ (key, host_port)
+ })
+ .collect()
+ }
+
+ fn wait_till_ready(&self, port_mappings: &HashMap<u16, u16>) {
+ for port in port_mappings.values() {
+ let mut ok = false;
+ for _ in 0..30 {
+ match std::net::TcpStream::connect(format!("127.0.0.1:{port}")) {
+ Ok(_) => {
+ ok = true;
+ break;
+ }
+ Err(e) => {
+ if e.kind() != std::io::ErrorKind::ConnectionRefused {
+ panic!("unexpected localhost connection error: {e:?}");
+ }
+ std::thread::sleep(std::time::Duration::new(1, 0));
+ }
+ }
+ }
+ if !ok {
+ panic!("no listener on localhost port {port}");
+ }
+ }
+ }
+}
+
+impl ContainerHandle {
+ /// Executes a program inside a running container.
+ pub fn exec(&self, args: &[&str]) -> std::process::Output {
+ ProcessBuilder::new("docker")
+ .args(&["container", "exec", &self.name])
+ .args(args)
+ .exec_with_output()
+ .unwrap()
+ }
+
+ /// Returns the contents of a file inside the container.
+ pub fn read_file(&self, path: &str) -> String {
+ let output = ProcessBuilder::new("docker")
+ .args(&["cp", &format!("{}:{}", self.name, path), "-"])
+ .exec_with_output()
+ .unwrap();
+ let mut ar = tar::Archive::new(output.stdout.as_slice());
+ let mut entry = ar.entries().unwrap().next().unwrap().unwrap();
+ let mut contents = String::new();
+ entry.read_to_string(&mut contents).unwrap();
+ contents
+ }
+}
+
+impl Drop for ContainerHandle {
+ fn drop(&mut self) {
+ // To help with debugging, this will keep the container alive.
+ if std::env::var_os("CARGO_CONTAINER_TEST_KEEP").is_some() {
+ return;
+ }
+ remove_if_exists(&self.name);
+ }
+}
+
+fn remove_if_exists(name: &str) {
+ if let Err(e) = Command::new("docker")
+ .args(&["container", "rm", "--force", name])
+ .output()
+ {
+ panic!("failed to run docker: {e}");
+ }
+}
+
+/// Builder for configuring a file to copy into a container.
+pub struct MkFile {
+ path: String,
+ contents: Vec<u8>,
+ header: Header,
+}
+
+impl MkFile {
+ /// Defines a file to add to the container.
+ ///
+ /// This should be passed to `Container::file`.
+ ///
+ /// The path is the path inside the container to create the file.
+ pub fn path(path: &str) -> MkFile {
+ MkFile {
+ path: path.to_string(),
+ contents: Vec::new(),
+ header: Header::new_gnu(),
+ }
+ }
+
+ pub fn contents(mut self, contents: impl Into<Vec<u8>>) -> Self {
+ self.contents = contents.into();
+ self.header.set_size(self.contents.len() as u64);
+ self
+ }
+
+ pub fn mode(mut self, mode: u32) -> Self {
+ self.header.set_mode(mode);
+ self
+ }
+
+ pub fn uid(mut self, uid: u64) -> Self {
+ self.header.set_uid(uid);
+ self
+ }
+
+ pub fn gid(mut self, gid: u64) -> Self {
+ self.header.set_gid(gid);
+ self
+ }
+}
diff --git a/crates/cargo-test-support/src/cross_compile.rs b/crates/cargo-test-support/src/cross_compile.rs
new file mode 100644
index 0000000..a2daf88
--- /dev/null
+++ b/crates/cargo-test-support/src/cross_compile.rs
@@ -0,0 +1,264 @@
+//! Support for cross-compile tests with the `--target` flag.
+//!
+//! Note that cross-testing is very limited. You need to install the
+//! "alternate" target to the host (32-bit for 64-bit hosts or vice-versa).
+//!
+//! Set CFG_DISABLE_CROSS_TESTS=1 environment variable to disable these tests
+//! if you are unable to use the alternate target. Unfortunately 32-bit
+//! support on macOS is going away, so macOS users are out of luck.
+//!
+//! These tests are all disabled on rust-lang/rust's CI, but run in Cargo's CI.
+
+use crate::{basic_manifest, main_file, project};
+use cargo_util::ProcessError;
+use std::env;
+use std::fmt::Write;
+use std::process::{Command, Output};
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::Once;
+
+/// Whether or not the resulting cross binaries can run on the host.
+static CAN_RUN_ON_HOST: AtomicBool = AtomicBool::new(false);
+
/// Returns whether cross-compile tests should be disabled on this host.
///
/// The first call per process probes the toolchain by building (and trying
/// to run) a tiny project for the [`alternate`] target. If even the build
/// fails, exactly one test panics with detailed installation instructions
/// and every other cross test is quietly disabled.
pub fn disabled() -> bool {
    // First, disable if requested.
    match env::var("CFG_DISABLE_CROSS_TESTS") {
        Ok(ref s) if *s == "1" => return true,
        _ => {}
    }

    // Cross tests are only tested to work on macos, linux, and MSVC windows.
    if !(cfg!(target_os = "macos") || cfg!(target_os = "linux") || cfg!(target_env = "msvc")) {
        return true;
    }

    // It's not particularly common to have a cross-compilation setup, so
    // try to detect that before we fail a bunch of tests through no fault
    // of the user.
    static CAN_BUILD_CROSS_TESTS: AtomicBool = AtomicBool::new(false);
    static CHECK: Once = Once::new();

    let cross_target = alternate();

    // Builds and runs a trivial cross-compiled project, recording in the
    // statics above whether the build and the run succeeded.
    let run_cross_test = || -> anyhow::Result<Output> {
        let p = project()
            .at("cross_test")
            .file("Cargo.toml", &basic_manifest("cross_test", "1.0.0"))
            .file("src/main.rs", &main_file(r#""testing!""#, &[]))
            .build();

        let build_result = p
            .cargo("build --target")
            .arg(&cross_target)
            .exec_with_output();

        if build_result.is_ok() {
            CAN_BUILD_CROSS_TESTS.store(true, Ordering::SeqCst);
        }

        let result = p
            .cargo("run --target")
            .arg(&cross_target)
            .exec_with_output();

        if result.is_ok() {
            CAN_RUN_ON_HOST.store(true, Ordering::SeqCst);
        }
        build_result
    };

    // Probe at most once per process; a failure is diagnosed below.
    CHECK.call_once(|| {
        drop(run_cross_test());
    });

    if CAN_BUILD_CROSS_TESTS.load(Ordering::SeqCst) {
        // We were able to compile a simple project, so the user has the
        // necessary `std::` bits installed. Therefore, tests should not
        // be disabled.
        return false;
    }

    // We can't compile a simple cross project. We want to warn the user
    // by failing a single test and having the remainder of the cross tests
    // pass. We don't use `std::sync::Once` here because panicking inside its
    // `call_once` method would poison the `Once` instance, which is not what
    // we want.
    static HAVE_WARNED: AtomicBool = AtomicBool::new(false);

    if HAVE_WARNED.swap(true, Ordering::SeqCst) {
        // We are some other test and somebody else is handling the warning.
        // Just disable the current test.
        return true;
    }

    // We are responsible for warning the user, which we do by panicking.
    let mut message = format!(
        "
Cannot cross compile to {}.

This failure can be safely ignored. If you would prefer to not see this
failure, you can set the environment variable CFG_DISABLE_CROSS_TESTS to \"1\".

Alternatively, you can install the necessary libraries to enable cross
compilation tests. Cross compilation tests depend on your host platform.
",
        cross_target
    );

    // Append platform-specific setup instructions.
    if cfg!(target_os = "linux") {
        message.push_str(
            "
Linux cross tests target i686-unknown-linux-gnu, which requires the ability to
build and run 32-bit targets. This requires the 32-bit libraries to be
installed. For example, on Ubuntu, run `sudo apt install gcc-multilib` to
install the necessary libraries.
",
        );
    } else if cfg!(all(target_os = "macos", target_arch = "aarch64")) {
        message.push_str(
            "
macOS on aarch64 cross tests to target x86_64-apple-darwin.
This should be natively supported via Xcode, nothing additional besides the
rustup target should be needed.
",
        );
    } else if cfg!(target_os = "macos") {
        message.push_str(
            "
macOS on x86_64 cross tests to target x86_64-apple-ios, which requires the iOS
SDK to be installed. This should be included with Xcode automatically. If you
are using the Xcode command line tools, you'll need to install the full Xcode
app (from the Apple App Store), and switch to it with this command:

    sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer

Some cross-tests want to *run* the executables on the host. These tests will
be ignored if this is not possible. On macOS, this means you need an iOS
simulator installed to run these tests. To install a simulator, open Xcode, go
to preferences > Components, and download the latest iOS simulator.
",
        );
    } else if cfg!(target_os = "windows") {
        message.push_str(
            "
Windows cross tests target i686-pc-windows-msvc, which requires the ability
to build and run 32-bit targets. This should work automatically if you have
properly installed Visual Studio build tools.
",
        );
    } else {
        // The check at the top should prevent this.
        panic!("platform should have been skipped");
    }

    let rustup_available = Command::new("rustup").output().is_ok();
    if rustup_available {
        write!(
            message,
            "
Make sure that the appropriate `rustc` target is installed with rustup:

    rustup target add {}
",
            cross_target
        )
        .unwrap();
    } else {
        write!(
            message,
            "
rustup does not appear to be installed. Make sure that the appropriate
`rustc` target is installed for the target `{}`.
",
            cross_target
        )
        .unwrap();
    }

    // Show the actual error message.
    match run_cross_test() {
        Ok(_) => message.push_str("\nUh oh, second run succeeded?\n"),
        Err(err) => match err.downcast_ref::<ProcessError>() {
            Some(proc_err) => write!(message, "\nTest error: {}\n", proc_err).unwrap(),
            None => write!(message, "\nUnexpected non-process error: {}\n", err).unwrap(),
        },
    }

    panic!("{}", message);
}
+
/// The arch triple of the test-running host.
///
/// `NATIVE_ARCH` is baked in at compile time via `env!` — presumably exported
/// by this crate's build script; confirm there if it is missing.
pub fn native() -> &'static str {
    env!("NATIVE_ARCH")
}
+
+pub fn native_arch() -> &'static str {
+ match native()
+ .split("-")
+ .next()
+ .expect("Target triple has unexpected format")
+ {
+ "x86_64" => "x86_64",
+ "aarch64" => "aarch64",
+ "i686" => "x86",
+ _ => panic!("This test should be gated on cross_compile::disabled."),
+ }
+}
+
/// The alternate target-triple to build with.
///
/// Only use this function on tests that check `cross_compile::disabled`.
/// Panics on hosts that have no configured alternate target.
pub fn alternate() -> &'static str {
    if cfg!(all(target_os = "macos", target_arch = "aarch64")) {
        return "x86_64-apple-darwin";
    }
    if cfg!(target_os = "macos") {
        return "x86_64-apple-ios";
    }
    if cfg!(target_os = "linux") {
        return "i686-unknown-linux-gnu";
    }
    if cfg!(all(target_os = "windows", target_env = "msvc")) {
        return "i686-pc-windows-msvc";
    }
    if cfg!(all(target_os = "windows", target_env = "gnu")) {
        return "i686-pc-windows-gnu";
    }
    panic!("This test should be gated on cross_compile::disabled.");
}
+
/// The CPU architecture of the [`alternate`] target: `"x86_64"` on macOS
/// hosts, `"x86"` everywhere else.
pub fn alternate_arch() -> &'static str {
    match cfg!(target_os = "macos") {
        true => "x86_64",
        false => "x86",
    }
}
+
/// A target-triple that is neither the host nor the target.
///
/// Rustc may not work with it and it's alright, apart from being a
/// valid target triple it is supposed to be used only as a
/// placeholder for targets that should not be considered.
///
/// (`wasm32-unknown-unknown` is never returned by [`alternate`] on any
/// supported host, so it is safe as a "never built" placeholder.)
pub fn unused() -> &'static str {
    "wasm32-unknown-unknown"
}
+
+/// Whether or not the host can run cross-compiled executables.
+pub fn can_run_on_host() -> bool {
+ if disabled() {
+ return false;
+ }
+ // macos is currently configured to cross compile to x86_64-apple-ios
+ // which requires a simulator to run. Azure's CI image appears to have the
+ // SDK installed, but are not configured to launch iOS images with a
+ // simulator.
+ if cfg!(target_os = "macos") {
+ if CAN_RUN_ON_HOST.load(Ordering::SeqCst) {
+ return true;
+ } else {
+ println!("Note: Cannot run on host, skipping.");
+ return false;
+ }
+ } else {
+ assert!(CAN_RUN_ON_HOST.load(Ordering::SeqCst));
+ return true;
+ }
+}
diff --git a/crates/cargo-test-support/src/diff.rs b/crates/cargo-test-support/src/diff.rs
new file mode 100644
index 0000000..f3b283b
--- /dev/null
+++ b/crates/cargo-test-support/src/diff.rs
@@ -0,0 +1,174 @@
+//! A simple Myers diff implementation.
+//!
+//! This focuses on being short and simple, and the expense of being
+//! inefficient. A key characteristic here is that this supports cargotest's
+//! `[..]` wildcard matching. That means things like hashing can't be used.
+//! Since Cargo's output tends to be small, this should be sufficient.
+
+use std::fmt;
+use std::io::Write;
+use termcolor::{Ansi, Color, ColorSpec, NoColor, WriteColor};
+
/// A single line change to be applied to the original.
///
/// Positions are 1-based (see how `diff` adds 1 when building them).
#[derive(Debug, Eq, PartialEq)]
pub enum Change<T> {
    /// Insert this value; the position indexes into the *new* sequence.
    Add(usize, T),
    /// Remove this value; the position indexes into the *original* sequence.
    Remove(usize, T),
    /// Keep this value; positions are (original, new).
    Keep(usize, usize, T),
}
+
+pub fn diff<'a, T>(a: &'a [T], b: &'a [T]) -> Vec<Change<&'a T>>
+where
+ T: PartialEq,
+{
+ if a.is_empty() && b.is_empty() {
+ return vec![];
+ }
+ let mut diff = vec![];
+ for (prev_x, prev_y, x, y) in backtrack(&a, &b) {
+ if x == prev_x {
+ diff.push(Change::Add(prev_y + 1, &b[prev_y]));
+ } else if y == prev_y {
+ diff.push(Change::Remove(prev_x + 1, &a[prev_x]));
+ } else {
+ diff.push(Change::Keep(prev_x + 1, prev_y + 1, &a[prev_x]));
+ }
+ }
+ diff.reverse();
+ diff
+}
+
/// Forward pass of Myers' shortest-edit-script search.
///
/// Returns a snapshot of the `v` vector for every edit distance `d`
/// explored, so [`backtrack`] can reconstruct the actual edit path.
/// `v` maps a diagonal to its furthest-reaching `x`; indices are shifted by
/// `max` so that diagonals below the main one still land in valid range.
fn shortest_edit<T>(a: &[T], b: &[T]) -> Vec<Vec<usize>>
where
    T: PartialEq,
{
    let max = a.len() + b.len();
    let mut v = vec![0; 2 * max + 1];
    let mut trace = vec![];
    for d in 0..=max {
        trace.push(v.clone());
        for k in (0..=(2 * d)).step_by(2) {
            let mut x = if k == 0 || (k != 2 * d && v[max - d + k - 1] < v[max - d + k + 1]) {
                // Move down
                v[max - d + k + 1]
            } else {
                // Move right
                v[max - d + k - 1] + 1
            };
            let mut y = x + d - k;
            // Step diagonally as far as possible.
            while x < a.len() && y < b.len() && a[x] == b[y] {
                x += 1;
                y += 1;
            }
            v[max - d + k] = x;
            // Return if reached the bottom-right position.
            if x >= a.len() && y >= b.len() {
                return trace;
            }
        }
    }
    panic!("finished without hitting end?");
}
+
/// Walks the [`shortest_edit`] trace backwards from the end of both
/// sequences, emitting `(prev_x, prev_y, x, y)` steps of the edit path in
/// reverse order (callers reverse the resulting change list).
fn backtrack<T>(a: &[T], b: &[T]) -> Vec<(usize, usize, usize, usize)>
where
    T: PartialEq,
{
    let mut result = vec![];
    let mut x = a.len();
    let mut y = b.len();
    let max = x + y;
    for (d, v) in shortest_edit(a, b).iter().enumerate().rev() {
        let k = x + d - y;
        // Decide which neighboring diagonal the path came from (mirrors the
        // choice made in the forward pass).
        let prev_k = if k == 0 || (k != 2 * d && v[max - d + k - 1] < v[max - d + k + 1]) {
            k + 1
        } else {
            k - 1
        };
        let prev_x = v[max - d + prev_k];
        let prev_y = (prev_x + d).saturating_sub(prev_k);
        // Follow the diagonal (matching elements) back as far as it goes.
        while x > prev_x && y > prev_y {
            result.push((x - 1, y - 1, x, y));
            x -= 1;
            y -= 1;
        }
        if d > 0 {
            result.push((prev_x, prev_y, x, y));
        }
        x = prev_x;
        y = prev_y;
    }
    return result;
}
+
+pub fn colored_diff<'a, T>(a: &'a [T], b: &'a [T]) -> String
+where
+ T: PartialEq + fmt::Display,
+{
+ let changes = diff(a, b);
+ render_colored_changes(&changes)
+}
+
/// Renders a change list as a numbered listing with `+`/`-`/` ` markers,
/// colored green/red/dim respectively (color is suppressed on CI).
pub fn render_colored_changes<T: fmt::Display>(changes: &[Change<T>]) -> String {
    // termcolor is not very ergonomic, but I don't want to bring in another dependency.
    let mut red = ColorSpec::new();
    red.set_fg(Some(Color::Red));
    let mut green = ColorSpec::new();
    green.set_fg(Some(Color::Green));
    let mut dim = ColorSpec::new();
    dim.set_dimmed(true);
    let mut v = Vec::new();
    let mut result: Box<dyn WriteColor> = if crate::is_ci() {
        // Don't use color on CI. Even though GitHub can display colors, it
        // makes reading the raw logs more difficult.
        Box::new(NoColor::new(&mut v))
    } else {
        Box::new(Ansi::new(&mut v))
    };

    for change in changes {
        // Line-number gutter, change marker, color for the body text.
        let (nums, sign, color, text) = match change {
            Change::Add(i, s) => (format!("    {:<4} ", i), '+', &green, s),
            Change::Remove(i, s) => (format!("{:<4}     ", i), '-', &red, s),
            Change::Keep(x, y, s) => (format!("{:<4}{:<4} ", x, y), ' ', &dim, s),
        };
        result.set_color(&dim).unwrap();
        write!(result, "{}", nums).unwrap();
        let mut bold = color.clone();
        bold.set_bold(true);
        result.set_color(&bold).unwrap();
        write!(result, "{}", sign).unwrap();
        result.reset().unwrap();
        result.set_color(&color).unwrap();
        write!(result, "{}", text).unwrap();
        result.reset().unwrap();
        writeln!(result).unwrap();
    }
    // Dropping the writer releases the borrow of `v` so it can be consumed.
    drop(result);
    String::from_utf8(v).unwrap()
}
+
/// Test helper: asserts that the changes computed by [`diff`] reconstruct
/// `b` when applied (kept and added characters, in order, must equal `b`).
#[cfg(test)]
pub fn compare(a: &str, b: &str) {
    let a: Vec<_> = a.chars().collect();
    let b: Vec<_> = b.chars().collect();
    let result: Vec<char> = diff(&a, &b)
        .into_iter()
        .filter_map(|change| match change {
            Change::Add(_, c) | Change::Keep(_, _, c) => Some(*c),
            Change::Remove(_, _) => None,
        })
        .collect();
    assert_eq!(b, result);
}
+
/// Sanity checks: empty inputs, pure deletion, pure insertion, and a mixed
/// case exercising adds, removes, and keeps together.
#[test]
fn basic_tests() {
    compare("", "");
    compare("A", "");
    compare("", "B");
    compare("ABCABBA", "CBABAC");
}
diff --git a/crates/cargo-test-support/src/git.rs b/crates/cargo-test-support/src/git.rs
new file mode 100644
index 0000000..18c4646
--- /dev/null
+++ b/crates/cargo-test-support/src/git.rs
@@ -0,0 +1,249 @@
+/*
+# Git Testing Support
+
+## Creating a git dependency
+`git::new()` is an easy way to create a new git repository containing a
+project that you can then use as a dependency. It will automatically add all
+the files you specify in the project and commit them to the repository.
+Example:
+
+```
+let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep1"))
+ .file("src/lib.rs", r#"pub fn f() { println!("hi!"); } "#)
+});
+
+// Use the `url()` method to get the file url to the new repository.
+let p = project()
+ .file("Cargo.toml", &format!(r#"
+ [package]
+ name = "a"
+ version = "1.0.0"
+
+ [dependencies]
+ dep1 = {{ git = '{}' }}
+ "#, git_project.url()))
+ .file("src/lib.rs", "extern crate dep1;")
+ .build();
+```
+
+## Manually creating repositories
+`git::repo()` can be used to create a `RepoBuilder` which provides a way of
+adding files to a blank repository and committing them.
+
+If you want to then manipulate the repository (such as adding new files or
+tags), you can use `git2::Repository::open()` to open the repository and then
+use some of the helper functions in this file to interact with the repository.
+
+*/
+
+use crate::{path2url, project, Project, ProjectBuilder};
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::sync::Once;
+use url::Url;
+
#[must_use]
pub struct RepoBuilder {
    // The repository being populated.
    repo: git2::Repository,
    // Relative paths of files to stage and commit in `build()`.
    files: Vec<PathBuf>,
}
+
/// A finished repository produced by [`RepoBuilder::build`].
pub struct Repository(git2::Repository);
+
/// Create a `RepoBuilder` to build a new git repository.
///
/// `p` is where the repository is created; missing parent directories are
/// created automatically.
///
/// Call `build()` to finalize and create the repository.
pub fn repo(p: &Path) -> RepoBuilder {
    RepoBuilder::init(p)
}
+
impl RepoBuilder {
    /// Initializes a repository at `p`, creating parent directories first.
    pub fn init(p: &Path) -> RepoBuilder {
        t!(fs::create_dir_all(p.parent().unwrap()));
        let repo = init(p);
        RepoBuilder {
            repo,
            files: Vec::new(),
        }
    }

    /// Add a file to the repository.
    pub fn file(self, path: &str, contents: &str) -> RepoBuilder {
        let mut me = self.nocommit_file(path, contents);
        me.files.push(PathBuf::from(path));
        me
    }

    /// Add a file that will be left in the working directory, but not added
    /// to the repository.
    pub fn nocommit_file(self, path: &str, contents: &str) -> RepoBuilder {
        let dst = self.repo.workdir().unwrap().join(path);
        t!(fs::create_dir_all(dst.parent().unwrap()));
        t!(fs::write(&dst, contents));
        self
    }

    /// Create the repository and commit the new files.
    pub fn build(self) -> Repository {
        {
            // Stage every registered file and write the initial commit on HEAD.
            let mut index = t!(self.repo.index());
            for file in self.files.iter() {
                t!(index.add_path(file));
            }
            t!(index.write());
            let id = t!(index.write_tree());
            let tree = t!(self.repo.find_tree(id));
            let sig = t!(self.repo.signature());
            t!(self
                .repo
                .commit(Some("HEAD"), &sig, &sig, "Initial commit", &tree, &[]));
        }
        let RepoBuilder { repo, .. } = self;
        Repository(repo)
    }
}
+
impl Repository {
    /// Path of the repository's working directory.
    pub fn root(&self) -> &Path {
        self.0.workdir().unwrap()
    }

    /// `file://` URL of the repository's working directory.
    pub fn url(&self) -> Url {
        path2url(self.0.workdir().unwrap().to_path_buf())
    }

    /// The object id that `HEAD` currently resolves to, rendered as a string.
    pub fn revparse_head(&self) -> String {
        self.0
            .revparse_single("HEAD")
            .expect("revparse HEAD")
            .id()
            .to_string()
    }
}
+
/// Initialize a new repository at the given path.
///
/// Also points git's config search path at a blank directory (once per
/// process) and gives the repository a fixed test identity, so tests do not
/// depend on the host's git configuration.
pub fn init(path: &Path) -> git2::Repository {
    default_search_path();
    let repo = t!(git2::Repository::init(path));
    default_repo_cfg(&repo);
    repo
}
+
/// Points every git config search path at an empty directory, once per
/// process, isolating tests from any host git configuration.
fn default_search_path() {
    use crate::paths::global_root;
    use git2::{opts::set_search_path, ConfigLevel};

    static INIT: Once = Once::new();
    INIT.call_once(|| unsafe {
        // SAFETY(review): `set_search_path` mutates libgit2's process-global
        // options; the `Once` guard ensures this runs exactly once. Assumes
        // no concurrent git2 usage races the first call — TODO confirm.
        let path = global_root().join("blank_git_search_path");
        t!(set_search_path(ConfigLevel::System, &path));
        t!(set_search_path(ConfigLevel::Global, &path));
        t!(set_search_path(ConfigLevel::XDG, &path));
        t!(set_search_path(ConfigLevel::ProgramData, &path));
    })
}
+
/// Sets a fixed `user.name`/`user.email` on the repository so that commits
/// made by tests never depend on the host's git identity.
fn default_repo_cfg(repo: &git2::Repository) {
    let mut cfg = t!(repo.config());
    t!(cfg.set_str("user.email", "foo@bar.com"));
    t!(cfg.set_str("user.name", "Foo Bar"));
}
+
/// Create a new git repository with a project.
///
/// Convenience wrapper over [`new_repo`] that discards the `git2::Repository`
/// handle and returns only the [`Project`].
pub fn new<F>(name: &str, callback: F) -> Project
where
    F: FnOnce(ProjectBuilder) -> ProjectBuilder,
{
    new_repo(name, callback).0
}
+
/// Create a new git repository with a project.
/// Returns both the Project and the git Repository.
pub fn new_repo<F>(name: &str, callback: F) -> (Project, git2::Repository)
where
    F: FnOnce(ProjectBuilder) -> ProjectBuilder,
{
    // Let the caller populate the project, then turn its root into a
    // repository with all files added and committed.
    let mut git_project = project().at(name);
    git_project = callback(git_project);
    let git_project = git_project.build();

    let repo = init(&git_project.root());
    add(&repo);
    commit(&repo);
    (git_project, repo)
}
+
/// Add all files in the working directory to the git index.
pub fn add(repo: &git2::Repository) {
    // FIXME(libgit2/libgit2#2514): apparently, `add_all` will add all submodules
    // as well, and then fail because they're directories. As a stop-gap, we just
    // ignore all submodules.
    let mut s = t!(repo.submodules());
    for submodule in s.iter_mut() {
        t!(submodule.add_to_index(false));
    }
    let mut index = t!(repo.index());
    t!(index.add_all(
        ["*"].iter(),
        git2::IndexAddOption::DEFAULT,
        Some(
            // Returning non-zero from the matched-path callback skips the
            // path; skip anything located inside a submodule.
            &mut (|a, _b| if s.iter().any(|s| a.starts_with(s.path())) {
                1
            } else {
                0
            })
        )
    ));
    t!(index.write());
}
+
/// Add a git submodule to the repository.
///
/// `url` is the submodule's source repository; `path` is where it is placed
/// inside the parent repository's working tree. The submodule is fetched and
/// checked out before being finalized.
pub fn add_submodule<'a>(
    repo: &'a git2::Repository,
    url: &str,
    path: &Path,
) -> git2::Submodule<'a> {
    // git uses forward slashes in paths, even on Windows.
    let path = path.to_str().unwrap().replace(r"\", "/");
    let mut s = t!(repo.submodule(url, Path::new(&path), false));
    let subrepo = t!(s.open());
    default_repo_cfg(&subrepo);
    t!(subrepo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*"));
    let mut origin = t!(subrepo.find_remote("origin"));
    t!(origin.fetch(&Vec::<String>::new(), None, None));
    t!(subrepo.checkout_head(None));
    t!(s.add_finalize());
    s
}
+
/// Commit changes to the git repository.
///
/// Writes the current index as a tree and commits it to `HEAD` with message
/// "test", using the current `HEAD` commit (if any) as the sole parent.
/// Returns the new commit's id.
pub fn commit(repo: &git2::Repository) -> git2::Oid {
    let tree_id = t!(t!(repo.index()).write_tree());
    let sig = t!(repo.signature());
    let mut parents = Vec::new();
    if let Some(parent) = repo.head().ok().map(|h| h.target().unwrap()) {
        parents.push(t!(repo.find_commit(parent)))
    }
    let parents = parents.iter().collect::<Vec<_>>();
    t!(repo.commit(
        Some("HEAD"),
        &sig,
        &sig,
        "test",
        &t!(repo.find_tree(tree_id)),
        &parents
    ))
}
+
/// Create a new tag in the git repository.
///
/// The tag points at the current `HEAD` commit; since a tagger signature and
/// message are supplied, this creates an annotated tag. Panics if `name`
/// already exists (`force` is `false`).
pub fn tag(repo: &git2::Repository, name: &str) {
    let head = repo.head().unwrap().target().unwrap();
    t!(repo.tag(
        name,
        &t!(repo.find_object(head, None)),
        &t!(repo.signature()),
        "make a new tag",
        false
    ));
}
diff --git a/crates/cargo-test-support/src/install.rs b/crates/cargo-test-support/src/install.rs
new file mode 100644
index 0000000..478b482
--- /dev/null
+++ b/crates/cargo-test-support/src/install.rs
@@ -0,0 +1,29 @@
+use crate::paths;
+use std::env::consts::EXE_SUFFIX;
+use std::path::{Path, PathBuf};
+
/// Used by `cargo install` tests to assert an executable binary
/// has been installed. Example usage:
///
/// assert_has_installed_exe(cargo_home(), "foo");
///
/// Panics if `<path>/bin/<name[.exe]>` is not a regular file.
#[track_caller]
pub fn assert_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {
    assert!(check_has_installed_exe(path, name));
}
+
/// Asserts that the named executable has *not* been installed under `path`
/// (the inverse of [`assert_has_installed_exe`]).
#[track_caller]
pub fn assert_has_not_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {
    assert!(!check_has_installed_exe(path, name));
}
+
+fn check_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) -> bool {
+ path.as_ref().join("bin").join(exe(name)).is_file()
+}
+
/// `$HOME/.cargo` inside the test sandbox (see `paths::home`).
pub fn cargo_home() -> PathBuf {
    paths::home().join(".cargo")
}
+
/// Appends the platform's executable suffix (`.exe` on Windows, nothing on
/// unix) to `name`.
pub fn exe(name: &str) -> String {
    let mut filename = String::with_capacity(name.len() + EXE_SUFFIX.len());
    filename.push_str(name);
    filename.push_str(EXE_SUFFIX);
    filename
}
diff --git a/crates/cargo-test-support/src/lib.rs b/crates/cargo-test-support/src/lib.rs
new file mode 100644
index 0000000..04d6ce9
--- /dev/null
+++ b/crates/cargo-test-support/src/lib.rs
@@ -0,0 +1,1424 @@
+//! # Cargo test support.
+//!
+//! See <https://rust-lang.github.io/cargo/contrib/> for a guide on writing tests.
+
+#![allow(clippy::all)]
+
+use std::env;
+use std::ffi::OsStr;
+use std::fmt::Write;
+use std::fs;
+use std::os;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Output};
+use std::str;
+use std::time::{self, Duration};
+
+use anyhow::{bail, Result};
+use cargo_util::{is_ci, ProcessBuilder, ProcessError};
+use serde_json;
+use url::Url;
+
+use self::paths::CargoPathExt;
+
/// Unwraps a `Result`, panicking via `$crate::panic_error` with the
/// stringified expression and the error's cause chain on `Err`.
#[macro_export]
macro_rules! t {
    ($e:expr) => {
        match $e {
            Ok(e) => e,
            Err(e) => $crate::panic_error(&format!("failed running {}", stringify!($e)), e),
        }
    };
}
+
/// Expands to the directory containing the current source file
/// (see [`_curr_dir`] for the rust-lang/rust workspace path adjustment).
#[macro_export]
macro_rules! curr_dir {
    () => {
        // No trailing semicolon: this macro is used in expression position,
        // and a `stmt`-like expansion there relies on the deprecated
        // `semicolon_in_expressions_from_macros` compatibility behavior.
        $crate::_curr_dir(std::path::Path::new(file!()))
    };
}
+
/// Resolves the directory containing `file_path` (a `file!()` path).
///
/// When the path does not exist relative to the current directory, it is
/// assumed we are running inside the rust-lang/rust workspace, where cargo
/// lives under `src/tools/cargo`; that prefix is stripped to recover a
/// crate-relative path. Panics if the path still does not exist.
#[doc(hidden)]
pub fn _curr_dir(file_path: &'static Path) -> &'static Path {
    let mut resolved = file_path;
    if !resolved.exists() {
        // HACK: adjust for the rust-lang/rust workspace layout.
        let rustc_prefix = PathBuf::from("src").join("tools").join("cargo");
        if let Ok(crate_relative) = resolved.strip_prefix(&rustc_prefix) {
            resolved = crate_relative;
        }
    }
    assert!(resolved.exists(), "{} does not exist", resolved.display());
    resolved.parent().unwrap()
}
+
/// Panics with `what` plus the error and its full chain of causes.
///
/// The formatting lives in the inner non-generic `pe` function —
/// presumably to keep the generic outer shell tiny for monomorphization;
/// the outer function only performs the `Into<anyhow::Error>` conversion.
#[track_caller]
pub fn panic_error(what: &str, err: impl Into<anyhow::Error>) -> ! {
    let err = err.into();
    pe(what, err);
    #[track_caller]
    fn pe(what: &str, err: anyhow::Error) -> ! {
        let mut result = format!("{}\nerror: {}", what, err);
        for cause in err.chain().skip(1) {
            // Writing to a String cannot fail; ignore the Results.
            drop(writeln!(result, "\nCaused by:"));
            drop(write!(result, "{}", cause));
        }
        panic!("\n{}", result);
    }
}
+
+pub use cargo_test_macro::cargo_test;
+
+pub mod compare;
+pub mod containers;
+pub mod cross_compile;
+mod diff;
+pub mod git;
+pub mod install;
+pub mod paths;
+pub mod publish;
+pub mod registry;
+pub mod tools;
+
/// Commonly-used extension traits, re-exported for a one-line import in tests.
pub mod prelude {
    pub use crate::ArgLine;
    pub use crate::CargoCommand;
    pub use crate::ChannelChanger;
    pub use crate::TestEnv;
}
+
+/*
+ *
+ * ===== Builders =====
+ *
+ */
+
// Describes a file to be created inside a test project.
#[derive(PartialEq, Clone)]
struct FileBuilder {
    // Absolute path of the file to create.
    path: PathBuf,
    // Contents to write to the file.
    body: String,
    // Whether to mark the file executable (also appends `EXE_EXTENSION`).
    executable: bool,
}
+
+impl FileBuilder {
+ pub fn new(path: PathBuf, body: &str, executable: bool) -> FileBuilder {
+ FileBuilder {
+ path,
+ body: body.to_string(),
+ executable: executable,
+ }
+ }
+
+ fn mk(&mut self) {
+ if self.executable {
+ self.path.set_extension(env::consts::EXE_EXTENSION);
+ }
+
+ self.dirname().mkdir_p();
+ fs::write(&self.path, &self.body)
+ .unwrap_or_else(|e| panic!("could not create file {}: {}", self.path.display(), e));
+
+ #[cfg(unix)]
+ if self.executable {
+ use std::os::unix::fs::PermissionsExt;
+
+ let mut perms = fs::metadata(&self.path).unwrap().permissions();
+ let mode = perms.mode();
+ perms.set_mode(mode | 0o111);
+ fs::set_permissions(&self.path, perms).unwrap();
+ }
+ }
+
+ fn dirname(&self) -> &Path {
+ self.path.parent().unwrap()
+ }
+}
+
// Describes a symlink to be created inside a test project.
#[derive(PartialEq, Clone)]
struct SymlinkBuilder {
    // Path the link points at (the link target).
    dst: PathBuf,
    // Path of the symlink itself.
    src: PathBuf,
    // Whether the target is a directory (Windows distinguishes file and
    // directory symlinks).
    src_is_dir: bool,
}
+
impl SymlinkBuilder {
    /// Describes a symlink to a file: `dst` is the target, `src` is where
    /// the link itself will be created.
    pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder {
        SymlinkBuilder {
            dst,
            src,
            src_is_dir: false,
        }
    }

    /// Describes a symlink to a directory (Windows needs to know the target
    /// kind up front).
    pub fn new_dir(dst: PathBuf, src: PathBuf) -> SymlinkBuilder {
        SymlinkBuilder {
            dst,
            src,
            src_is_dir: true,
        }
    }

    // Creates the link on disk; unix has a single symlink type.
    #[cfg(unix)]
    fn mk(&self) {
        self.dirname().mkdir_p();
        t!(os::unix::fs::symlink(&self.dst, &self.src));
    }

    // Creates the link on disk; Windows distinguishes file vs directory
    // links, and executable links mirror the target's `.exe` extension.
    // (Note: takes `&mut self` here, unlike the unix version, because
    // `set_extension` mutates `src`.)
    #[cfg(windows)]
    fn mk(&mut self) {
        self.dirname().mkdir_p();
        if self.src_is_dir {
            t!(os::windows::fs::symlink_dir(&self.dst, &self.src));
        } else {
            if let Some(ext) = self.dst.extension() {
                if ext == env::consts::EXE_EXTENSION {
                    self.src.set_extension(ext);
                }
            }
            t!(os::windows::fs::symlink_file(&self.dst, &self.src));
        }
    }

    // Directory that will contain the link.
    fn dirname(&self) -> &Path {
        self.src.parent().unwrap()
    }
}
+
/// A cargo project to run tests against.
///
/// See [`ProjectBuilder`] or [`Project::from_template`] to get started.
pub struct Project {
    // Root directory of the project on disk.
    root: PathBuf,
}
+
/// Create a project to run tests against
///
/// The project can be constructed programmatically or from the filesystem with [`Project::from_template`]
#[must_use]
pub struct ProjectBuilder {
    // The project being assembled.
    root: Project,
    // Files to create when `build()` runs.
    files: Vec<FileBuilder>,
    // Symlinks to create when `build()` runs.
    symlinks: Vec<SymlinkBuilder>,
    // When true, skip generating the default `Cargo.toml`.
    no_manifest: bool,
}
+
impl ProjectBuilder {
    /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo`
    pub fn root(&self) -> PathBuf {
        self.root.root()
    }

    /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug`
    pub fn target_debug_dir(&self) -> PathBuf {
        self.root.target_debug_dir()
    }

    /// Creates a builder rooted at `root`.
    pub fn new(root: PathBuf) -> ProjectBuilder {
        ProjectBuilder {
            root: Project { root },
            files: vec![],
            symlinks: vec![],
            no_manifest: false,
        }
    }

    /// Moves the project root to `paths::root()` joined with `path`.
    pub fn at<P: AsRef<Path>>(mut self, path: P) -> Self {
        self.root = Project {
            root: paths::root().join(path),
        };
        self
    }

    /// Adds a file to the project.
    pub fn file<B: AsRef<Path>>(mut self, path: B, body: &str) -> Self {
        self._file(path.as_ref(), body, false);
        self
    }

    /// Adds an executable file to the project.
    pub fn executable<B: AsRef<Path>>(mut self, path: B, body: &str) -> Self {
        self._file(path.as_ref(), body, true);
        self
    }

    // Queues a file for creation under the project root.
    fn _file(&mut self, path: &Path, body: &str, executable: bool) {
        self.files.push(FileBuilder::new(
            self.root.root().join(path),
            body,
            executable,
        ));
    }

    /// Adds a symlink to a file to the project.
    pub fn symlink<T: AsRef<Path>>(mut self, dst: T, src: T) -> Self {
        self.symlinks.push(SymlinkBuilder::new(
            self.root.root().join(dst),
            self.root.root().join(src),
        ));
        self
    }

    /// Create a symlink to a directory
    pub fn symlink_dir<T: AsRef<Path>>(mut self, dst: T, src: T) -> Self {
        self.symlinks.push(SymlinkBuilder::new_dir(
            self.root.root().join(dst),
            self.root.root().join(src),
        ));
        self
    }

    /// Disables generation of the default `Cargo.toml` in `build()`.
    pub fn no_manifest(mut self) -> Self {
        self.no_manifest = true;
        self
    }

    /// Creates the project.
    pub fn build(mut self) -> Project {
        // First, clean the directory if it already exists
        self.rm_root();

        // Create the empty directory
        self.root.root().mkdir_p();

        // Supply a default manifest unless the caller provided one (or
        // explicitly opted out with `no_manifest`).
        let manifest_path = self.root.root().join("Cargo.toml");
        if !self.no_manifest && self.files.iter().all(|fb| fb.path != manifest_path) {
            self._file(
                Path::new("Cargo.toml"),
                &basic_manifest("foo", "0.0.1"),
                false,
            )
        }

        let past = time::SystemTime::now() - Duration::new(1, 0);
        let ftime = filetime::FileTime::from_system_time(past);

        for file in self.files.iter_mut() {
            file.mk();
            if is_coarse_mtime() {
                // Place the entire project 1 second in the past to ensure
                // that if cargo is called multiple times, the 2nd call will
                // see targets as "fresh". Without this, if cargo finishes in
                // under 1 second, the second call will see the mtime of
                // source == mtime of output and consider it dirty.
                filetime::set_file_times(&file.path, ftime, ftime).unwrap();
            }
        }

        for symlink in self.symlinks.iter_mut() {
            symlink.mk();
        }

        let ProjectBuilder { root, .. } = self;
        root
    }

    // Deletes the project root, if present.
    fn rm_root(&self) {
        self.root.root().rm_rf()
    }
}
+
+impl Project {
+ /// Copy the test project from a fixed state
+ pub fn from_template(template_path: impl AsRef<std::path::Path>) -> Self {
+ let root = paths::root();
+ let project_root = root.join("case");
+ snapbox::path::copy_template(template_path.as_ref(), &project_root).unwrap();
+ Self { root: project_root }
+ }
+
+ /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo`
+ pub fn root(&self) -> PathBuf {
+ self.root.clone()
+ }
+
+ /// Project's target dir, ex: `/path/to/cargo/target/cit/t0/foo/target`
+ pub fn build_dir(&self) -> PathBuf {
+ self.root().join("target")
+ }
+
+ /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug`
+ pub fn target_debug_dir(&self) -> PathBuf {
+ self.build_dir().join("debug")
+ }
+
+ /// File url for root, ex: `file:///path/to/cargo/target/cit/t0/foo`
+ pub fn url(&self) -> Url {
+ path2url(self.root())
+ }
+
+ /// Path to an example built as a library.
+ /// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro"
+ /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/examples/libex.rlib`
+ pub fn example_lib(&self, name: &str, kind: &str) -> PathBuf {
+ self.target_debug_dir()
+ .join("examples")
+ .join(paths::get_lib_filename(name, kind))
+ }
+
+ /// Path to a debug binary.
+ /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/foo`
+ pub fn bin(&self, b: &str) -> PathBuf {
+ self.build_dir()
+ .join("debug")
+ .join(&format!("{}{}", b, env::consts::EXE_SUFFIX))
+ }
+
+ /// Path to a release binary.
+ /// ex: `/path/to/cargo/target/cit/t0/foo/target/release/foo`
+ pub fn release_bin(&self, b: &str) -> PathBuf {
+ self.build_dir()
+ .join("release")
+ .join(&format!("{}{}", b, env::consts::EXE_SUFFIX))
+ }
+
+ /// Path to a debug binary for a specific target triple.
+ /// ex: `/path/to/cargo/target/cit/t0/foo/target/i686-apple-darwin/debug/foo`
+ pub fn target_bin(&self, target: &str, b: &str) -> PathBuf {
+ self.build_dir().join(target).join("debug").join(&format!(
+ "{}{}",
+ b,
+ env::consts::EXE_SUFFIX
+ ))
+ }
+
+ /// Returns an iterator of paths matching the glob pattern, which is
+ /// relative to the project root.
+ pub fn glob<P: AsRef<Path>>(&self, pattern: P) -> glob::Paths {
+ let pattern = self.root().join(pattern);
+ glob::glob(pattern.to_str().expect("failed to convert pattern to str"))
+ .expect("failed to glob")
+ }
+
+ /// Changes the contents of an existing file.
+ pub fn change_file(&self, path: &str, body: &str) {
+ FileBuilder::new(self.root().join(path), body, false).mk()
+ }
+
+ /// Creates a `ProcessBuilder` to run a program in the project
+ /// and wrap it in an Execs to assert on the execution.
+ /// Example:
+ /// p.process(&p.bin("foo"))
+ /// .with_stdout("bar\n")
+ /// .run();
+ pub fn process<T: AsRef<OsStr>>(&self, program: T) -> Execs {
+ let mut p = process(program);
+ p.cwd(self.root());
+ execs().with_process_builder(p)
+ }
+
+ /// Creates a `ProcessBuilder` to run cargo.
+ /// Arguments can be separated by spaces.
+ /// Example:
+ /// p.cargo("build --bin foo").run();
+ pub fn cargo(&self, cmd: &str) -> Execs {
+ let cargo = cargo_exe();
+ let mut execs = self.process(&cargo);
+ if let Some(ref mut p) = execs.process_builder {
+ p.env("CARGO", cargo);
+ p.arg_line(cmd);
+ }
+ execs
+ }
+
+ /// Safely run a process after `cargo build`.
+ ///
+ /// Windows has a problem where a process cannot be reliably
+ /// be replaced, removed, or renamed immediately after executing it.
+ /// The action may fail (with errors like Access is denied), or
+ /// it may succeed, but future attempts to use the same filename
+ /// will fail with "Already Exists".
+ ///
+ /// If you have a test that needs to do `cargo run` multiple
+ /// times, you should instead use `cargo build` and use this
+ /// method to run the executable. Each time you call this,
+ /// use a new name for `dst`.
+ /// See rust-lang/cargo#5481.
+ pub fn rename_run(&self, src: &str, dst: &str) -> Execs {
+ let src = self.bin(src);
+ let dst = self.bin(dst);
+ fs::rename(&src, &dst)
+ .unwrap_or_else(|e| panic!("Failed to rename `{:?}` to `{:?}`: {}", src, dst, e));
+ self.process(dst)
+ }
+
    /// Returns the contents of `Cargo.lock`.
    ///
    /// Convenience wrapper around [`Self::read_file`]; panics (via
    /// `read_file`) if the lockfile does not exist or is unreadable.
    pub fn read_lockfile(&self) -> String {
        self.read_file("Cargo.lock")
    }
+
+ /// Returns the contents of a path in the project root
+ pub fn read_file(&self, path: &str) -> String {
+ let full = self.root().join(path);
+ fs::read_to_string(&full)
+ .unwrap_or_else(|e| panic!("could not read file {}: {}", full.display(), e))
+ }
+
    /// Modifies `Cargo.toml` to remove all commented lines.
    ///
    /// Note: this strips every `#` character from the manifest, which
    /// uncomments `# key = ...` style lines; a `#` appearing inside a
    /// string value would also be removed, so avoid them in fixtures.
    pub fn uncomment_root_manifest(&self) {
        let contents = self.read_file("Cargo.toml").replace("#", "");
        fs::write(self.root().join("Cargo.toml"), contents).unwrap();
    }
+
    /// Creates a symlink at `dst` pointing to `src`, both interpreted
    /// relative to the project root. Panics on failure.
    ///
    /// On Windows, directory and file symlinks use different APIs, so the
    /// choice is made by checking whether `src` is an existing directory.
    pub fn symlink(&self, src: impl AsRef<Path>, dst: impl AsRef<Path>) {
        let src = self.root().join(src.as_ref());
        let dst = self.root().join(dst.as_ref());
        #[cfg(unix)]
        {
            if let Err(e) = os::unix::fs::symlink(&src, &dst) {
                panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
            }
        }
        #[cfg(windows)]
        {
            if src.is_dir() {
                if let Err(e) = os::windows::fs::symlink_dir(&src, &dst) {
                    panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
                }
            } else {
                if let Err(e) = os::windows::fs::symlink_file(&src, &dst) {
                    panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
                }
            }
        }
    }
+}
+
+// Generates a project layout
+pub fn project() -> ProjectBuilder {
+ ProjectBuilder::new(paths::root().join("foo"))
+}
+
+// Generates a project layout in given directory
+pub fn project_in(dir: &str) -> ProjectBuilder {
+ ProjectBuilder::new(paths::root().join(dir).join("foo"))
+}
+
+// Generates a project layout inside our fake home dir
+pub fn project_in_home(name: &str) -> ProjectBuilder {
+ ProjectBuilder::new(paths::home().join(name))
+}
+
+// === Helpers ===
+
/// Builds the source of a `main.rs` consisting of one `extern crate` line
/// per dependency in `deps` followed by a `main` that prints `println`
/// (which is spliced verbatim into the `println!` call, so it must
/// already include any quotes/format arguments).
pub fn main_file(println: &str, deps: &[&str]) -> String {
    let externs: String = deps
        .iter()
        .map(|dep| format!("extern crate {};\n", dep))
        .collect();
    format!("{}fn main() {{ println!({}); }}\n", externs, println)
}
+
/// Path to the `cargo` binary under test, resolved via
/// `snapbox::cmd::cargo_bin`.
pub fn cargo_exe() -> PathBuf {
    snapbox::cmd::cargo_bin("cargo")
}
+
/// This is the raw output from the process.
///
/// This is similar to `std::process::Output`, however the `status` is
/// translated to the raw `code`. This is necessary because `ProcessError`
/// does not have access to the raw `ExitStatus` because `ProcessError` needs
/// to be serializable (for the Rustc cache), and `ExitStatus` does not
/// provide a constructor.
pub struct RawOutput {
    /// The process exit code, if any (`None` e.g. when killed by a signal).
    pub code: Option<i32>,
    /// Raw bytes captured from stdout.
    pub stdout: Vec<u8>,
    /// Raw bytes captured from stderr.
    pub stderr: Vec<u8>,
}
+
/// A builder for assertions on the execution of a process.
///
/// Created by [`execs`]; configured with the `with_*` methods and consumed
/// by `run`/`run_json`/etc. Dropping an `Execs` without running it panics
/// (see the `Drop` impl) to catch forgotten `.run()` calls.
#[must_use]
#[derive(Clone)]
pub struct Execs {
    // Set once any of the run/exec methods fire; checked in Drop.
    ran: bool,
    // The process to spawn; `None` until `with_process_builder` is called.
    process_builder: Option<ProcessBuilder>,
    // Exact-match expectations (pattern syntax, see `compare`).
    expect_stdout: Option<String>,
    // Text fed to the child's stdin by `run()`.
    expect_stdin: Option<String>,
    expect_stderr: Option<String>,
    // `Some(0)` by default; `None` disables the exit-code check.
    expect_exit_code: Option<i32>,
    // Containment / exclusion / ordering expectations.
    expect_stdout_contains: Vec<String>,
    expect_stderr_contains: Vec<String>,
    expect_stdout_contains_n: Vec<(String, usize)>,
    expect_stdout_not_contains: Vec<String>,
    expect_stderr_not_contains: Vec<String>,
    expect_stderr_unordered: Vec<String>,
    // (with, without) substring sets for `with_stderr_line_without`.
    expect_stderr_with_without: Vec<(Vec<String>, Vec<String>)>,
    // JSON expectations for commands that emit JSON objects.
    expect_json: Option<String>,
    expect_json_contains_unordered: Option<String>,
    // Forward child output to the terminal (local debugging only).
    stream_output: bool,
}
+
+impl Execs {
+ pub fn with_process_builder(mut self, p: ProcessBuilder) -> Execs {
+ self.process_builder = Some(p);
+ self
+ }
+
+ /// Verifies that stdout is equal to the given lines.
+ /// See [`compare`] for supported patterns.
+ pub fn with_stdout<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stdout = Some(expected.to_string());
+ self
+ }
+
+ /// Verifies that stderr is equal to the given lines.
+ /// See [`compare`] for supported patterns.
+ pub fn with_stderr<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stderr = Some(expected.to_string());
+ self
+ }
+
+ /// Writes the given lines to stdin.
+ pub fn with_stdin<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stdin = Some(expected.to_string());
+ self
+ }
+
+ /// Verifies the exit code from the process.
+ ///
+ /// This is not necessary if the expected exit code is `0`.
+ pub fn with_status(&mut self, expected: i32) -> &mut Self {
+ self.expect_exit_code = Some(expected);
+ self
+ }
+
+ /// Removes exit code check for the process.
+ ///
+ /// By default, the expected exit code is `0`.
+ pub fn without_status(&mut self) -> &mut Self {
+ self.expect_exit_code = None;
+ self
+ }
+
+ /// Verifies that stdout contains the given contiguous lines somewhere in
+ /// its output.
+ ///
+ /// See [`compare`] for supported patterns.
+ pub fn with_stdout_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stdout_contains.push(expected.to_string());
+ self
+ }
+
+ /// Verifies that stderr contains the given contiguous lines somewhere in
+ /// its output.
+ ///
+ /// See [`compare`] for supported patterns.
+ pub fn with_stderr_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stderr_contains.push(expected.to_string());
+ self
+ }
+
+ /// Verifies that stdout contains the given contiguous lines somewhere in
+ /// its output, and should be repeated `number` times.
+ ///
+ /// See [`compare`] for supported patterns.
+ pub fn with_stdout_contains_n<S: ToString>(&mut self, expected: S, number: usize) -> &mut Self {
+ self.expect_stdout_contains_n
+ .push((expected.to_string(), number));
+ self
+ }
+
+ /// Verifies that stdout does not contain the given contiguous lines.
+ ///
+ /// See [`compare`] for supported patterns.
+ ///
+ /// See note on [`Self::with_stderr_does_not_contain`].
+ pub fn with_stdout_does_not_contain<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stdout_not_contains.push(expected.to_string());
+ self
+ }
+
+ /// Verifies that stderr does not contain the given contiguous lines.
+ ///
+ /// See [`compare`] for supported patterns.
+ ///
+ /// Care should be taken when using this method because there is a
+ /// limitless number of possible things that *won't* appear. A typo means
+ /// your test will pass without verifying the correct behavior. If
+ /// possible, write the test first so that it fails, and then implement
+ /// your fix/feature to make it pass.
+ pub fn with_stderr_does_not_contain<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stderr_not_contains.push(expected.to_string());
+ self
+ }
+
+ /// Verifies that all of the stderr output is equal to the given lines,
+ /// ignoring the order of the lines.
+ ///
+ /// See [`compare`] for supported patterns.
+ ///
+ /// This is useful when checking the output of `cargo build -v` since
+ /// the order of the output is not always deterministic.
+ /// Recommend use `with_stderr_contains` instead unless you really want to
+ /// check *every* line of output.
+ ///
+ /// Be careful when using patterns such as `[..]`, because you may end up
+ /// with multiple lines that might match, and this is not smart enough to
+ /// do anything like longest-match. For example, avoid something like:
+ ///
+ /// ```text
+ /// [RUNNING] `rustc [..]
+ /// [RUNNING] `rustc --crate-name foo [..]
+ /// ```
+ ///
+ /// This will randomly fail if the other crate name is `bar`, and the
+ /// order changes.
+ pub fn with_stderr_unordered<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stderr_unordered.push(expected.to_string());
+ self
+ }
+
+ /// Verify that a particular line appears in stderr with and without the
+ /// given substrings. Exactly one line must match.
+ ///
+ /// The substrings are matched as `contains`. Example:
+ ///
+ /// ```no_run
+ /// execs.with_stderr_line_without(
+ /// &[
+ /// "[RUNNING] `rustc --crate-name build_script_build",
+ /// "-C opt-level=3",
+ /// ],
+ /// &["-C debuginfo", "-C incremental"],
+ /// )
+ /// ```
+ ///
+ /// This will check that a build line includes `-C opt-level=3` but does
+ /// not contain `-C debuginfo` or `-C incremental`.
+ ///
+ /// Be careful writing the `without` fragments, see note in
+ /// `with_stderr_does_not_contain`.
+ pub fn with_stderr_line_without<S: ToString>(
+ &mut self,
+ with: &[S],
+ without: &[S],
+ ) -> &mut Self {
+ let with = with.iter().map(|s| s.to_string()).collect();
+ let without = without.iter().map(|s| s.to_string()).collect();
+ self.expect_stderr_with_without.push((with, without));
+ self
+ }
+
+ /// Verifies the JSON output matches the given JSON.
+ ///
+ /// This is typically used when testing cargo commands that emit JSON.
+ /// Each separate JSON object should be separated by a blank line.
+ /// Example:
+ ///
+ /// ```rust,ignore
+ /// assert_that(
+ /// p.cargo("metadata"),
+ /// execs().with_json(r#"
+ /// {"example": "abc"}
+ ///
+ /// {"example": "def"}
+ /// "#)
+ /// );
+ /// ```
+ ///
+ /// - Objects should match in the order given.
+ /// - The order of arrays is ignored.
+ /// - Strings support patterns described in [`compare`].
+ /// - Use `"{...}"` to match any object.
+ pub fn with_json(&mut self, expected: &str) -> &mut Self {
+ self.expect_json = Some(expected.to_string());
+ self
+ }
+
+ /// Verifies JSON output contains the given objects (in any order) somewhere
+ /// in its output.
+ ///
+ /// CAUTION: Be very careful when using this. Make sure every object is
+ /// unique (not a subset of one another). Also avoid using objects that
+ /// could possibly match multiple output lines unless you're very sure of
+ /// what you are doing.
+ ///
+ /// See `with_json` for more detail.
+ pub fn with_json_contains_unordered(&mut self, expected: &str) -> &mut Self {
+ match &mut self.expect_json_contains_unordered {
+ None => self.expect_json_contains_unordered = Some(expected.to_string()),
+ Some(e) => {
+ e.push_str("\n\n");
+ e.push_str(expected);
+ }
+ }
+ self
+ }
+
+ /// Forward subordinate process stdout/stderr to the terminal.
+ /// Useful for printf debugging of the tests.
+ /// CAUTION: CI will fail if you leave this in your test!
+ #[allow(unused)]
+ pub fn stream(&mut self) -> &mut Self {
+ self.stream_output = true;
+ self
+ }
+
+ pub fn arg<T: AsRef<OsStr>>(&mut self, arg: T) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.arg(arg);
+ }
+ self
+ }
+
+ pub fn cwd<T: AsRef<OsStr>>(&mut self, path: T) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ if let Some(cwd) = p.get_cwd() {
+ let new_path = cwd.join(path.as_ref());
+ p.cwd(new_path);
+ } else {
+ p.cwd(path);
+ }
+ }
+ self
+ }
+
+ fn get_cwd(&self) -> Option<&Path> {
+ self.process_builder.as_ref().and_then(|p| p.get_cwd())
+ }
+
+ pub fn env<T: AsRef<OsStr>>(&mut self, key: &str, val: T) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.env(key, val);
+ }
+ self
+ }
+
+ pub fn env_remove(&mut self, key: &str) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.env_remove(key);
+ }
+ self
+ }
+
+ pub fn exec_with_output(&mut self) -> Result<Output> {
+ self.ran = true;
+ // TODO avoid unwrap
+ let p = (&self.process_builder).clone().unwrap();
+ p.exec_with_output()
+ }
+
+ pub fn build_command(&mut self) -> Command {
+ self.ran = true;
+ // TODO avoid unwrap
+ let p = (&self.process_builder).clone().unwrap();
+ p.build_command()
+ }
+
+ /// Enables nightly features for testing
+ ///
+ /// The list of reasons should be why nightly cargo is needed. If it is
+ /// becuase of an unstable feature put the name of the feature as the reason,
+ /// e.g. `&["print-im-a-teapot"]`
+ pub fn masquerade_as_nightly_cargo(&mut self, reasons: &[&str]) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.masquerade_as_nightly_cargo(reasons);
+ }
+ self
+ }
+
+ /// Overrides the crates.io URL for testing.
+ ///
+ /// Can be used for testing crates-io functionality where alt registries
+ /// cannot be used.
+ pub fn replace_crates_io(&mut self, url: &Url) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.env("__CARGO_TEST_CRATES_IO_URL_DO_NOT_USE_THIS", url.as_str());
+ }
+ self
+ }
+
+ pub fn enable_split_debuginfo_packed(&mut self) -> &mut Self {
+ self.env("CARGO_PROFILE_DEV_SPLIT_DEBUGINFO", "packed")
+ .env("CARGO_PROFILE_TEST_SPLIT_DEBUGINFO", "packed")
+ .env("CARGO_PROFILE_RELEASE_SPLIT_DEBUGINFO", "packed")
+ .env("CARGO_PROFILE_BENCH_SPLIT_DEBUGINFO", "packed");
+ self
+ }
+
+ pub fn enable_mac_dsym(&mut self) -> &mut Self {
+ if cfg!(target_os = "macos") {
+ return self.enable_split_debuginfo_packed();
+ }
+ self
+ }
+
+ #[track_caller]
+ pub fn run(&mut self) {
+ self.ran = true;
+ let mut p = (&self.process_builder).clone().unwrap();
+ if let Some(stdin) = self.expect_stdin.take() {
+ p.stdin(stdin);
+ }
+ if let Err(e) = self.match_process(&p) {
+ panic_error(&format!("test failed running {}", p), e);
+ }
+ }
+
+ #[track_caller]
+ pub fn run_expect_error(&mut self) {
+ self.ran = true;
+ let p = (&self.process_builder).clone().unwrap();
+ if self.match_process(&p).is_ok() {
+ panic!("test was expected to fail, but succeeded running {}", p);
+ }
+ }
+
+ /// Runs the process, checks the expected output, and returns the first
+ /// JSON object on stdout.
+ #[track_caller]
+ pub fn run_json(&mut self) -> serde_json::Value {
+ self.ran = true;
+ let p = (&self.process_builder).clone().unwrap();
+ match self.match_process(&p) {
+ Err(e) => panic_error(&format!("test failed running {}", p), e),
+ Ok(output) => serde_json::from_slice(&output.stdout).unwrap_or_else(|e| {
+ panic!(
+ "\nfailed to parse JSON: {}\n\
+ output was:\n{}\n",
+ e,
+ String::from_utf8_lossy(&output.stdout)
+ );
+ }),
+ }
+ }
+
+ #[track_caller]
+ pub fn run_output(&mut self, output: &Output) {
+ self.ran = true;
+ if let Err(e) = self.match_output(output.status.code(), &output.stdout, &output.stderr) {
+ panic_error("process did not return the expected result", e)
+ }
+ }
+
+ fn verify_checks_output(&self, stdout: &[u8], stderr: &[u8]) {
+ if self.expect_exit_code.unwrap_or(0) != 0
+ && self.expect_stdout.is_none()
+ && self.expect_stdin.is_none()
+ && self.expect_stderr.is_none()
+ && self.expect_stdout_contains.is_empty()
+ && self.expect_stderr_contains.is_empty()
+ && self.expect_stdout_contains_n.is_empty()
+ && self.expect_stdout_not_contains.is_empty()
+ && self.expect_stderr_not_contains.is_empty()
+ && self.expect_stderr_unordered.is_empty()
+ && self.expect_stderr_with_without.is_empty()
+ && self.expect_json.is_none()
+ && self.expect_json_contains_unordered.is_none()
+ {
+ panic!(
+ "`with_status()` is used, but no output is checked.\n\
+ The test must check the output to ensure the correct error is triggered.\n\
+ --- stdout\n{}\n--- stderr\n{}",
+ String::from_utf8_lossy(stdout),
+ String::from_utf8_lossy(stderr),
+ );
+ }
+ }
+
+ fn match_process(&self, process: &ProcessBuilder) -> Result<RawOutput> {
+ println!("running {}", process);
+ let res = if self.stream_output {
+ if is_ci() {
+ panic!("`.stream()` is for local debugging")
+ }
+ process.exec_with_streaming(
+ &mut |out| {
+ println!("{}", out);
+ Ok(())
+ },
+ &mut |err| {
+ eprintln!("{}", err);
+ Ok(())
+ },
+ true,
+ )
+ } else {
+ process.exec_with_output()
+ };
+
+ match res {
+ Ok(out) => {
+ self.match_output(out.status.code(), &out.stdout, &out.stderr)?;
+ return Ok(RawOutput {
+ stdout: out.stdout,
+ stderr: out.stderr,
+ code: out.status.code(),
+ });
+ }
+ Err(e) => {
+ if let Some(ProcessError {
+ stdout: Some(stdout),
+ stderr: Some(stderr),
+ code,
+ ..
+ }) = e.downcast_ref::<ProcessError>()
+ {
+ self.match_output(*code, stdout, stderr)?;
+ return Ok(RawOutput {
+ stdout: stdout.to_vec(),
+ stderr: stderr.to_vec(),
+ code: *code,
+ });
+ }
+ bail!("could not exec process {}: {:?}", process, e)
+ }
+ }
+ }
+
+ fn match_output(&self, code: Option<i32>, stdout: &[u8], stderr: &[u8]) -> Result<()> {
+ self.verify_checks_output(stdout, stderr);
+ let stdout = str::from_utf8(stdout).expect("stdout is not utf8");
+ let stderr = str::from_utf8(stderr).expect("stderr is not utf8");
+ let cwd = self.get_cwd();
+
+ match self.expect_exit_code {
+ None => {}
+ Some(expected) if code == Some(expected) => {}
+ Some(expected) => bail!(
+ "process exited with code {} (expected {})\n--- stdout\n{}\n--- stderr\n{}",
+ code.unwrap_or(-1),
+ expected,
+ stdout,
+ stderr
+ ),
+ }
+
+ if let Some(expect_stdout) = &self.expect_stdout {
+ compare::match_exact(expect_stdout, stdout, "stdout", stderr, cwd)?;
+ }
+ if let Some(expect_stderr) = &self.expect_stderr {
+ compare::match_exact(expect_stderr, stderr, "stderr", stdout, cwd)?;
+ }
+ for expect in self.expect_stdout_contains.iter() {
+ compare::match_contains(expect, stdout, cwd)?;
+ }
+ for expect in self.expect_stderr_contains.iter() {
+ compare::match_contains(expect, stderr, cwd)?;
+ }
+ for &(ref expect, number) in self.expect_stdout_contains_n.iter() {
+ compare::match_contains_n(expect, number, stdout, cwd)?;
+ }
+ for expect in self.expect_stdout_not_contains.iter() {
+ compare::match_does_not_contain(expect, stdout, cwd)?;
+ }
+ for expect in self.expect_stderr_not_contains.iter() {
+ compare::match_does_not_contain(expect, stderr, cwd)?;
+ }
+ for expect in self.expect_stderr_unordered.iter() {
+ compare::match_unordered(expect, stderr, cwd)?;
+ }
+ for (with, without) in self.expect_stderr_with_without.iter() {
+ compare::match_with_without(stderr, with, without, cwd)?;
+ }
+
+ if let Some(ref expect_json) = self.expect_json {
+ compare::match_json(expect_json, stdout, cwd)?;
+ }
+
+ if let Some(ref expected) = self.expect_json_contains_unordered {
+ compare::match_json_contains_unordered(expected, stdout, cwd)?;
+ }
+ Ok(())
+ }
+}
+
+impl Drop for Execs {
+ fn drop(&mut self) {
+ if !self.ran && !std::thread::panicking() {
+ panic!("forgot to run this command");
+ }
+ }
+}
+
+pub fn execs() -> Execs {
+ Execs {
+ ran: false,
+ process_builder: None,
+ expect_stdout: None,
+ expect_stderr: None,
+ expect_stdin: None,
+ expect_exit_code: Some(0),
+ expect_stdout_contains: Vec::new(),
+ expect_stderr_contains: Vec::new(),
+ expect_stdout_contains_n: Vec::new(),
+ expect_stdout_not_contains: Vec::new(),
+ expect_stderr_not_contains: Vec::new(),
+ expect_stderr_unordered: Vec::new(),
+ expect_stderr_with_without: Vec::new(),
+ expect_json: None,
+ expect_json_contains_unordered: None,
+ stream_output: false,
+ }
+}
+
/// Returns a minimal `Cargo.toml` with the given package `name` and
/// `version` and an empty `authors` list.
pub fn basic_manifest(name: &str, version: &str) -> String {
    format!(
        r#"
        [package]
        name = "{}"
        version = "{}"
        authors = []
        "#,
        name, version
    )
}
+
/// Returns a `Cargo.toml` for a package with a single `[[bin]]` target
/// named `name` (version fixed at `0.5.0`).
pub fn basic_bin_manifest(name: &str) -> String {
    format!(
        r#"
        [package]

        name = "{}"
        version = "0.5.0"
        authors = ["wycats@example.com"]

        [[bin]]

        name = "{}"
        "#,
        name, name
    )
}
+
/// Returns a `Cargo.toml` for a package with a single `[lib]` target
/// named `name` (version fixed at `0.5.0`).
pub fn basic_lib_manifest(name: &str) -> String {
    format!(
        r#"
        [package]

        name = "{}"
        version = "0.5.0"
        authors = ["wycats@example.com"]

        [lib]

        name = "{}"
        "#,
        name, name
    )
}
+
+pub fn path2url<P: AsRef<Path>>(p: P) -> Url {
+ Url::from_file_path(p).ok().unwrap()
+}
+
/// Cached output of `rustc -vV` (see [`RUSTC_INFO`]).
struct RustcInfo {
    // Full multi-line output of `rustc -vV`.
    verbose_version: String,
    // The `host: ` line extracted from `verbose_version`.
    host: String,
}
+
+impl RustcInfo {
+ fn new() -> RustcInfo {
+ let output = ProcessBuilder::new("rustc")
+ .arg("-vV")
+ .exec_with_output()
+ .expect("rustc should exec");
+ let verbose_version = String::from_utf8(output.stdout).expect("utf8 output");
+ let host = verbose_version
+ .lines()
+ .filter_map(|line| line.strip_prefix("host: "))
+ .next()
+ .expect("verbose version has host: field")
+ .to_string();
+ RustcInfo {
+ verbose_version,
+ host,
+ }
+ }
+}
+
lazy_static::lazy_static! {
    // Run `rustc -vV` only once per test process; results are immutable.
    static ref RUSTC_INFO: RustcInfo = RustcInfo::new();
}
+
+/// The rustc host such as `x86_64-unknown-linux-gnu`.
+pub fn rustc_host() -> &'static str {
+ &RUSTC_INFO.host
+}
+
+/// The host triple suitable for use in a cargo environment variable (uppercased).
+pub fn rustc_host_env() -> String {
+ rustc_host().to_uppercase().replace('-', "_")
+}
+
/// Whether the rustc under test is a nightly (or dev) toolchain and
/// nightly-only tests are allowed to run.
pub fn is_nightly() -> bool {
    let vv = &RUSTC_INFO.verbose_version;
    // CARGO_TEST_DISABLE_NIGHTLY is set in rust-lang/rust's CI so that all
    // nightly-only tests are disabled there. Otherwise, it could make it
    // difficult to land changes which would need to be made simultaneously in
    // rust-lang/cargo and rust-lang/rust, which isn't possible.
    env::var("CARGO_TEST_DISABLE_NIGHTLY").is_err()
        && (vv.contains("-nightly") || vv.contains("-dev"))
}
+
/// Creates a `ProcessBuilder` for `t` with the standard test environment
/// applied (sandbox cwd, scrubbed env vars — see `_process`/`test_env`).
pub fn process<T: AsRef<OsStr>>(t: T) -> ProcessBuilder {
    _process(t.as_ref())
}
+
+fn _process(t: &OsStr) -> ProcessBuilder {
+ let mut p = ProcessBuilder::new(t);
+ p.cwd(&paths::root()).test_env();
+ p
+}
+
/// Enable nightly features for testing
pub trait ChannelChanger {
    /// The list of reasons should be why nightly cargo is needed. If it is
    /// because of an unstable feature put the name of the feature as the reason,
    /// e.g. `&["print-im-a-teapot"]`.
    fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self;
}
+
impl ChannelChanger for &mut ProcessBuilder {
    fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self {
        // The reasons are documentation-only; the override is a single env var.
        self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly")
    }
}
+
impl ChannelChanger for snapbox::cmd::Command {
    fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self {
        // Same env-var override as the ProcessBuilder impl above.
        self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly")
    }
}
+
/// Establish a process's test environment
pub trait TestEnv: Sized {
    /// Scrubs and pins the environment so tests see a deterministic,
    /// sandboxed configuration regardless of the developer's machine.
    fn test_env(mut self) -> Self {
        // In general just clear out all cargo-specific configuration already in the
        // environment. Our tests all assume a "default configuration" unless
        // specified otherwise.
        for (k, _v) in env::vars() {
            if k.starts_with("CARGO_") {
                self = self.env_remove(&k);
            }
        }
        if env::var_os("RUSTUP_TOOLCHAIN").is_some() {
            // Override the PATH to avoid executing the rustup wrapper thousands
            // of times. This makes the testsuite run substantially faster.
            lazy_static::lazy_static! {
                // Directory of the real rustc, resolved once via `rustup which`.
                static ref RUSTC_DIR: PathBuf = {
                    match ProcessBuilder::new("rustup")
                        .args(&["which", "rustc"])
                        .exec_with_output()
                    {
                        Ok(output) => {
                            let s = str::from_utf8(&output.stdout).expect("utf8").trim();
                            let mut p = PathBuf::from(s);
                            p.pop();
                            p
                        }
                        Err(e) => {
                            panic!("RUSTUP_TOOLCHAIN was set, but could not run rustup: {}", e);
                        }
                    }
                };
            }
            let path = env::var_os("PATH").unwrap_or_default();
            let paths = env::split_paths(&path);
            // Prepend so the real rustc wins over the rustup shim.
            let new_path =
                env::join_paths(std::iter::once(RUSTC_DIR.clone()).chain(paths)).unwrap();
            self = self.env("PATH", new_path);
        }

        self = self
            .current_dir(&paths::root())
            .env("HOME", paths::home())
            .env("CARGO_HOME", paths::home().join(".cargo"))
            .env("__CARGO_TEST_ROOT", paths::global_root())
            // Force Cargo to think it's on the stable channel for all tests, this
            // should hopefully not surprise us as we add cargo features over time and
            // cargo rides the trains.
            .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "stable")
            // Keeps cargo within its sandbox.
            .env("__CARGO_TEST_DISABLE_GLOBAL_KNOWN_HOST", "1")
            // Incremental generates a huge amount of data per test, which we
            // don't particularly need. Tests that specifically need to check
            // the incremental behavior should turn this back on.
            .env("CARGO_INCREMENTAL", "0")
            // Don't read the system git config which is out of our control.
            .env("GIT_CONFIG_NOSYSTEM", "1")
            .env_remove("__CARGO_DEFAULT_LIB_METADATA")
            .env_remove("ALL_PROXY")
            .env_remove("EMAIL")
            .env_remove("GIT_AUTHOR_EMAIL")
            .env_remove("GIT_AUTHOR_NAME")
            .env_remove("GIT_COMMITTER_EMAIL")
            .env_remove("GIT_COMMITTER_NAME")
            .env_remove("http_proxy")
            .env_remove("HTTPS_PROXY")
            .env_remove("https_proxy")
            .env_remove("MAKEFLAGS")
            .env_remove("MFLAGS")
            .env_remove("MSYSTEM") // assume cmd.exe everywhere on windows
            .env_remove("RUSTC")
            .env_remove("RUSTC_WORKSPACE_WRAPPER")
            .env_remove("RUSTC_WRAPPER")
            .env_remove("RUSTDOC")
            .env_remove("RUSTDOCFLAGS")
            .env_remove("RUSTFLAGS")
            .env_remove("SSH_AUTH_SOCK") // ensure an outer agent is never contacted
            .env_remove("USER") // not set on some rust-lang docker images
            .env_remove("XDG_CONFIG_HOME"); // see #2345
        if cfg!(target_os = "macos") {
            // Work-around a bug in macOS 10.15, see `link_or_copy` for details.
            self = self.env("__CARGO_COPY_DONT_LINK_DO_NOT_USE_THIS", "1");
        }
        if cfg!(windows) {
            self = self.env("USERPROFILE", paths::home());
        }
        self
    }

    // Required hooks supplied by each implementor (ProcessBuilder, snapbox).
    fn current_dir<S: AsRef<std::path::Path>>(self, path: S) -> Self;
    fn env<S: AsRef<std::ffi::OsStr>>(self, key: &str, value: S) -> Self;
    fn env_remove(self, key: &str) -> Self;
}
+
impl TestEnv for &mut ProcessBuilder {
    fn current_dir<S: AsRef<std::path::Path>>(self, path: S) -> Self {
        let path = path.as_ref();
        self.cwd(path)
    }
    // NOTE: these delegate to ProcessBuilder's *inherent* methods of the
    // same name; inherent methods win over trait methods in resolution,
    // so this is not recursive.
    fn env<S: AsRef<std::ffi::OsStr>>(self, key: &str, value: S) -> Self {
        self.env(key, value)
    }
    fn env_remove(self, key: &str) -> Self {
        self.env_remove(key)
    }
}
+
impl TestEnv for snapbox::cmd::Command {
    // Delegates to snapbox's inherent methods (which consume and return
    // `Self`); inherent methods take precedence, so no recursion.
    fn current_dir<S: AsRef<std::path::Path>>(self, path: S) -> Self {
        self.current_dir(path)
    }
    fn env<S: AsRef<std::ffi::OsStr>>(self, key: &str, value: S) -> Self {
        self.env(key, value)
    }
    fn env_remove(self, key: &str) -> Self {
        self.env_remove(key)
    }
}
+
/// Test the cargo command
pub trait CargoCommand {
    /// Builds a command for the cargo binary under test with UI-style
    /// output assertions configured.
    fn cargo_ui() -> Self;
}
+
impl CargoCommand for snapbox::cmd::Command {
    fn cargo_ui() -> Self {
        // cargo binary + UI assertion config + sandboxed test environment.
        Self::new(cargo_exe())
            .with_assert(compare::assert_ui())
            .test_env()
    }
}
+
/// Add a list of arguments as a line
pub trait ArgLine: Sized {
    /// Splits `s` on whitespace and appends each token as an argument.
    /// A token fully wrapped in matching single or double quotes has the
    /// quotes stripped; any other embedded quote is rejected.
    fn arg_line(mut self, s: &str) -> Self {
        for token in s.split_whitespace() {
            let quoted = (token.starts_with('"') && token.ends_with('"'))
                || (token.starts_with('\'') && token.ends_with('\''));
            let arg = if quoted {
                // `.max(1)` keeps a lone quote character from producing an
                // inverted range.
                &token[1..(token.len() - 1).max(1)]
            } else if token.contains(&['"', '\''][..]) {
                panic!("shell-style argument parsing is not supported")
            } else {
                token
            };
            self = self.arg(arg);
        }
        self
    }

    fn arg<S: AsRef<std::ffi::OsStr>>(self, s: S) -> Self;
}
+
impl ArgLine for &mut ProcessBuilder {
    // Delegates to ProcessBuilder's inherent `arg` (inherent beats trait
    // method in resolution, so this does not recurse).
    fn arg<S: AsRef<std::ffi::OsStr>>(self, s: S) -> Self {
        self.arg(s)
    }
}
+
impl ArgLine for snapbox::cmd::Command {
    // Delegates to snapbox's inherent `arg`; see note on the
    // ProcessBuilder impl.
    fn arg<S: AsRef<std::ffi::OsStr>>(self, s: S) -> Self {
        self.arg(s)
    }
}
+
+pub fn cargo_process(s: &str) -> Execs {
+ let cargo = cargo_exe();
+ let mut p = process(&cargo);
+ p.env("CARGO", cargo);
+ p.arg_line(s);
+ execs().with_process_builder(p)
+}
+
+pub fn git_process(s: &str) -> ProcessBuilder {
+ let mut p = process("git");
+ p.arg_line(s);
+ p
+}
+
/// Suspends the current thread for `ms` milliseconds.
pub fn sleep_ms(ms: u64) {
    std::thread::sleep(Duration::from_millis(ms));
}
+
/// Returns `true` if the local filesystem has low-resolution mtimes.
pub fn is_coarse_mtime() -> bool {
    // If the filetime crate is being used to emulate HFS then
    // return `true`, without looking at the actual hardware.
    cfg!(emulate_second_only_system) ||
        // This should actually be a test that `$CARGO_TARGET_DIR` is on an HFS
        // filesystem, (or any filesystem with low-resolution mtimes). However,
        // that's tricky to detect, so for now just deal with CI.
        cfg!(target_os = "macos") && is_ci()
}
+
+/// Some CI setups are much slower then the equipment used by Cargo itself.
+/// Architectures that do not have a modern processor, hardware emulation, etc.
+/// This provides a way for those setups to increase the cut off for all the time based test.
+pub fn slow_cpu_multiplier(main: u64) -> Duration {
+ lazy_static::lazy_static! {
+ static ref SLOW_CPU_MULTIPLIER: u64 =
+ env::var("CARGO_TEST_SLOW_CPU_MULTIPLIER").ok().and_then(|m| m.parse().ok()).unwrap_or(1);
+ }
+ Duration::from_secs(*SLOW_CPU_MULTIPLIER * main)
+}
+
/// Returns whether the current Windows session can create file symlinks
/// (requires admin rights or developer mode); probes by creating and
/// removing a temporary link under the test root.
#[cfg(windows)]
pub fn symlink_supported() -> bool {
    if is_ci() {
        // We want to be absolutely sure this runs on CI.
        return true;
    }
    let src = paths::root().join("symlink_src");
    fs::write(&src, "").unwrap();
    let dst = paths::root().join("symlink_dst");
    let result = match os::windows::fs::symlink_file(&src, &dst) {
        Ok(_) => {
            fs::remove_file(&dst).unwrap();
            true
        }
        Err(e) => {
            eprintln!(
                "symlinks not supported: {:?}\n\
                 Windows 10 users should enable developer mode.",
                e
            );
            false
        }
    };
    fs::remove_file(&src).unwrap();
    // Expression form instead of the redundant `return result;`.
    result
}
+
/// Symlinks are always available on non-Windows platforms.
#[cfg(not(windows))]
pub fn symlink_supported() -> bool {
    true
}
+
/// The error message for ENOENT.
///
/// Useful for building platform-independent expected output, since the
/// exact wording differs between operating systems.
pub fn no_such_file_err_msg() -> String {
    let enoent = std::io::Error::from_raw_os_error(2);
    enoent.to_string()
}
diff --git a/crates/cargo-test-support/src/paths.rs b/crates/cargo-test-support/src/paths.rs
new file mode 100644
index 0000000..ef1fddb
--- /dev/null
+++ b/crates/cargo-test-support/src/paths.rs
@@ -0,0 +1,347 @@
+use filetime::{self, FileTime};
+use lazy_static::lazy_static;
+use std::cell::RefCell;
+use std::collections::HashMap;
+use std::env;
+use std::fs;
+use std::io::{self, ErrorKind};
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Mutex;
+
// Name of the directory (under the global root) holding the per-test dirs.
static CARGO_INTEGRATION_TEST_DIR: &str = "cit";

lazy_static! {
    // TODO: Use `SyncOnceCell` when stable
    // Process-wide root under which every test directory is created;
    // set once by `set_global_root`.
    static ref GLOBAL_ROOT: Mutex<Option<PathBuf>> = Mutex::new(None);

    // Cache of named test roots; its users are outside this chunk.
    static ref TEST_ROOTS: Mutex<HashMap<String, PathBuf>> = Default::default();
}
+
+/// This is used when running cargo is pre-CARGO_TARGET_TMPDIR
+/// TODO: Remove when CARGO_TARGET_TMPDIR grows old enough.
+fn global_root_legacy() -> PathBuf {
+ let mut path = t!(env::current_exe());
+ path.pop(); // chop off exe name
+ path.pop(); // chop off "deps"
+ path.push("tmp");
+ path.mkdir_p();
+ path
+}
+
+fn set_global_root(tmp_dir: Option<&'static str>) {
+ let mut lock = GLOBAL_ROOT.lock().unwrap();
+ if lock.is_none() {
+ let mut root = match tmp_dir {
+ Some(tmp_dir) => PathBuf::from(tmp_dir),
+ None => global_root_legacy(),
+ };
+
+ root.push(CARGO_INTEGRATION_TEST_DIR);
+ *lock = Some(root);
+ }
+}
+
+pub fn global_root() -> PathBuf {
+ let lock = GLOBAL_ROOT.lock().unwrap();
+ match lock.as_ref() {
+ Some(p) => p.clone(),
+ None => unreachable!("GLOBAL_ROOT not set yet"),
+ }
+}
+
// We need to give each test a unique id. The test name could serve this
// purpose, but the `test` crate doesn't have a way to obtain the current test
// name.[*] Instead, we used the `cargo-test-macro` crate to automatically
// insert an init function for each test that sets the test name in a thread
// local variable.
//
// [*] It does set the thread name, but only when running concurrently. If not
// running concurrently, all tests are run on the main thread.
thread_local! {
    // `None` outside of a `#[cargo_test]`-initialized test.
    static TEST_ID: RefCell<Option<usize>> = RefCell::new(None);
}
+
/// Guard returned by [`init_root`]; clears the thread-local test id when
/// dropped (see the `Drop` impl below).
pub struct TestIdGuard {
    // Prevents construction outside this module.
    _private: (),
}
+
+pub fn init_root(tmp_dir: Option<&'static str>) -> TestIdGuard {
+ static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
+
+ let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
+ TEST_ID.with(|n| *n.borrow_mut() = Some(id));
+
+ let guard = TestIdGuard { _private: () };
+
+ set_global_root(tmp_dir);
+ let r = root();
+ r.rm_rf();
+ r.mkdir_p();
+
+ guard
+}
+
+impl Drop for TestIdGuard {
+ fn drop(&mut self) {
+ TEST_ID.with(|n| *n.borrow_mut() = None);
+ }
+}
+
+pub fn root() -> PathBuf {
+ let id = TEST_ID.with(|n| {
+ n.borrow().expect(
+ "Tests must use the `#[cargo_test]` attribute in \
+ order to be able to use the crate root.",
+ )
+ });
+
+ let mut root = global_root();
+ root.push(&format!("t{}", id));
+ root
+}
+
+pub fn home() -> PathBuf {
+ let mut path = root();
+ path.push("home");
+ path.mkdir_p();
+ path
+}
+
/// Filesystem convenience helpers used throughout the test suite.
pub trait CargoPathExt {
    /// Recursively removes the path; a missing path is not an error.
    fn rm_rf(&self);
    /// Creates the directory and all of its parents, panicking on failure.
    fn mkdir_p(&self);

    /// Rewinds mtimes by one hour (makes files look stale).
    fn move_into_the_past(&self) {
        self.move_in_time(|sec, nsec| (sec - 3600, nsec))
    }

    /// Advances mtimes by one hour (makes files look fresh).
    fn move_into_the_future(&self) {
        self.move_in_time(|sec, nsec| (sec + 3600, nsec))
    }

    /// Applies `travel_amount` to the mtime of this file, or of every file
    /// under this directory (skipping any `target` subdirectory).
    fn move_in_time<F>(&self, travel_amount: F)
    where
        F: Fn(i64, u32) -> (i64, u32);
}
+
impl CargoPathExt for Path {
    fn rm_rf(&self) {
        let meta = match self.symlink_metadata() {
            Ok(meta) => meta,
            Err(e) => {
                // Already gone; nothing to remove.
                if e.kind() == ErrorKind::NotFound {
                    return;
                }
                panic!("failed to remove {:?}, could not read: {:?}", self, e);
            }
        };
        // There is a race condition between fetching the metadata and
        // actually performing the removal, but we don't care all that much
        // for our tests.
        if meta.is_dir() {
            if let Err(e) = fs::remove_dir_all(self) {
                panic!("failed to remove {:?}: {:?}", self, e)
            }
        } else if let Err(e) = fs::remove_file(self) {
            panic!("failed to remove {:?}: {:?}", self, e)
        }
    }

    fn mkdir_p(&self) {
        fs::create_dir_all(self)
            .unwrap_or_else(|e| panic!("failed to mkdir_p {}: {}", self.display(), e))
    }

    fn move_in_time<F>(&self, travel_amount: F)
    where
        F: Fn(i64, u32) -> (i64, u32),
    {
        if self.is_file() {
            time_travel(self, &travel_amount);
        } else {
            // Walk the directory, but leave everything under `target`
            // untouched.
            recurse(self, &self.join("target"), &travel_amount);
        }

        // Recursively applies `time_travel` to every file under `p` that is
        // not under `bad`.
        fn recurse<F>(p: &Path, bad: &Path, travel_amount: &F)
        where
            F: Fn(i64, u32) -> (i64, u32),
        {
            if p.is_file() {
                time_travel(p, travel_amount)
            } else if !p.starts_with(bad) {
                for f in t!(fs::read_dir(p)) {
                    let f = t!(f).path();
                    recurse(&f, bad, travel_amount);
                }
            }
        }

        // Applies `travel_amount` to a single file's modification time.
        fn time_travel<F>(path: &Path, travel_amount: &F)
        where
            F: Fn(i64, u32) -> (i64, u32),
        {
            let stat = t!(path.symlink_metadata());

            let mtime = FileTime::from_last_modification_time(&stat);

            let (sec, nsec) = travel_amount(mtime.unix_seconds(), mtime.nanoseconds());
            let newtime = FileTime::from_unix_time(sec, nsec);

            // Sadly change_file_times has a failure mode where a readonly file
            // cannot have its times changed on windows.
            do_op(path, "set file times", |path| {
                filetime::set_file_times(path, newtime, newtime)
            });
        }
    }
}
+
/// Runs `f` on `path`, retrying once after clearing readonly bits if the
/// first attempt fails with `PermissionDenied`.
///
/// `desc` is a human-readable description of the operation, used only in
/// panic messages. Panics if the retry fails, or on any other error kind.
fn do_op<F>(path: &Path, desc: &str, mut f: F)
where
    F: FnMut(&Path) -> io::Result<()>,
{
    match f(path) {
        Ok(()) => {}
        Err(ref e) if e.kind() == ErrorKind::PermissionDenied => {
            // Clear the readonly bit on the path itself...
            let mut p = t!(path.metadata()).permissions();
            p.set_readonly(false);
            t!(fs::set_permissions(path, p));

            // Unix also requires the parent to not be readonly for example when
            // removing files
            let parent = path.parent().unwrap();
            let mut p = t!(parent.metadata()).permissions();
            p.set_readonly(false);
            t!(fs::set_permissions(parent, p));

            // ...then retry once; a second failure is fatal.
            f(path).unwrap_or_else(|e| {
                panic!("failed to {} {}: {}", desc, path.display(), e);
            })
        }
        Err(e) => {
            panic!("failed to {} {}: {}", desc, path.display(), e);
        }
    }
}
+
+/// Get the filename for a library.
+///
+/// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro"
+///
+/// For example, dynamic library named "foo" would return:
+/// - macOS: "libfoo.dylib"
+/// - Windows: "foo.dll"
+/// - Unix: "libfoo.so"
+pub fn get_lib_filename(name: &str, kind: &str) -> String {
+ let prefix = get_lib_prefix(kind);
+ let extension = get_lib_extension(kind);
+ format!("{}{}.{}", prefix, name, extension)
+}
+
/// Returns the platform-specific filename prefix for a library of `kind`.
pub fn get_lib_prefix(kind: &str) -> &str {
    // rlibs are always `lib`-prefixed; linkable artifacts drop the prefix
    // on Windows.
    match kind {
        "lib" | "rlib" => "lib",
        "staticlib" | "dylib" | "proc-macro" if cfg!(windows) => "",
        "staticlib" | "dylib" | "proc-macro" => "lib",
        _ => unreachable!(),
    }
}
+
/// Returns the platform-specific file extension for a library of `kind`.
pub fn get_lib_extension(kind: &str) -> &str {
    match kind {
        "lib" | "rlib" => "rlib",
        "staticlib" if cfg!(windows) => "lib",
        "staticlib" => "a",
        "dylib" | "proc-macro" if cfg!(windows) => "dll",
        "dylib" | "proc-macro" if cfg!(target_os = "macos") => "dylib",
        "dylib" | "proc-macro" => "so",
        _ => unreachable!(),
    }
}
+
/// Returns the sysroot as queried from rustc.
pub fn sysroot() -> String {
    let output = Command::new("rustc")
        .arg("--print=sysroot")
        .output()
        .expect("rustc to run");
    assert!(output.status.success());
    // Strip the trailing newline that rustc prints.
    String::from_utf8(output.stdout).unwrap().trim().to_string()
}
+
/// Returns true if names such as aux.* are allowed.
///
/// Traditionally, Windows did not allow a set of file names (see `is_windows_reserved`
/// for a list). More recent versions of Windows have relaxed this restriction. This test
/// determines whether we are running in a mode that allows Windows reserved names.
#[cfg(windows)]
pub fn windows_reserved_names_are_allowed() -> bool {
    use cargo_util::is_ci;

    // Ensure tests still run in CI until we need to migrate.
    if is_ci() {
        return false;
    }

    use std::ffi::OsStr;
    use std::os::windows::ffi::OsStrExt;
    use std::ptr;
    use windows_sys::Win32::Storage::FileSystem::GetFullPathNameW;

    let test_file_name: Vec<_> = OsStr::new("aux.rs").encode_wide().collect();

    // First call with an empty buffer asks Windows for the required
    // buffer length (in u16 units, including the NUL terminator).
    let buffer_length =
        unsafe { GetFullPathNameW(test_file_name.as_ptr(), 0, ptr::null_mut(), ptr::null_mut()) };

    if buffer_length == 0 {
        // This means the call failed, so we'll conservatively assume reserved names are not allowed.
        return false;
    }

    let mut buffer = vec![0u16; buffer_length as usize];

    let result = unsafe {
        GetFullPathNameW(
            test_file_name.as_ptr(),
            buffer_length,
            buffer.as_mut_ptr(),
            ptr::null_mut(),
        )
    };

    if result == 0 {
        // Once again, conservatively assume reserved names are not allowed if the
        // GetFullPathNameW call failed.
        return false;
    }

    // Under the old rules, a file name like aux.rs would get converted into \\.\aux, so
    // we detect this case by checking if the string starts with \\.\
    //
    // Otherwise, the filename will be something like C:\Users\Foo\Documents\aux.rs
    let prefix: Vec<_> = OsStr::new("\\\\.\\").encode_wide().collect();
    // Direct negation instead of `if … { false } else { true }`.
    !buffer.starts_with(&prefix)
}
diff --git a/crates/cargo-test-support/src/publish.rs b/crates/cargo-test-support/src/publish.rs
new file mode 100644
index 0000000..85bc93c
--- /dev/null
+++ b/crates/cargo-test-support/src/publish.rs
@@ -0,0 +1,245 @@
+use crate::compare::{assert_match_exact, find_json_mismatch};
+use crate::registry::{self, alt_api_path, FeatureMap};
+use flate2::read::GzDecoder;
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use std::fs::File;
+use std::io::{self, prelude::*, SeekFrom};
+use std::path::{Path, PathBuf};
+use tar::Archive;
+
/// Reads a 4-byte little-endian `u32` length prefix from `reader`.
fn read_le_u32<R>(mut reader: R) -> io::Result<u32>
where
    R: Read,
{
    let mut bytes = [0u8; 4];
    reader
        .read_exact(&mut bytes)
        .map(|()| u32::from_le_bytes(bytes))
}
+
+/// Checks the result of a crate publish.
+pub fn validate_upload(expected_json: &str, expected_crate_name: &str, expected_files: &[&str]) {
+ let new_path = registry::api_path().join("api/v1/crates/new");
+ _validate_upload(
+ &new_path,
+ expected_json,
+ expected_crate_name,
+ expected_files,
+ &[],
+ );
+}
+
+/// Checks the result of a crate publish, along with the contents of the files.
+pub fn validate_upload_with_contents(
+ expected_json: &str,
+ expected_crate_name: &str,
+ expected_files: &[&str],
+ expected_contents: &[(&str, &str)],
+) {
+ let new_path = registry::api_path().join("api/v1/crates/new");
+ _validate_upload(
+ &new_path,
+ expected_json,
+ expected_crate_name,
+ expected_files,
+ expected_contents,
+ );
+}
+
+/// Checks the result of a crate publish to an alternative registry.
+pub fn validate_alt_upload(
+ expected_json: &str,
+ expected_crate_name: &str,
+ expected_files: &[&str],
+) {
+ let new_path = alt_api_path().join("api/v1/crates/new");
+ _validate_upload(
+ &new_path,
+ expected_json,
+ expected_crate_name,
+ expected_files,
+ &[],
+ );
+}
+
+fn _validate_upload(
+ new_path: &Path,
+ expected_json: &str,
+ expected_crate_name: &str,
+ expected_files: &[&str],
+ expected_contents: &[(&str, &str)],
+) {
+ let mut f = File::open(new_path).unwrap();
+ // 32-bit little-endian integer of length of JSON data.
+ let json_sz = read_le_u32(&mut f).expect("read json length");
+ let mut json_bytes = vec![0; json_sz as usize];
+ f.read_exact(&mut json_bytes).expect("read JSON data");
+ let actual_json = serde_json::from_slice(&json_bytes).expect("uploaded JSON should be valid");
+ let expected_json = serde_json::from_str(expected_json).expect("expected JSON does not parse");
+
+ if let Err(e) = find_json_mismatch(&expected_json, &actual_json, None) {
+ panic!("{}", e);
+ }
+
+ // 32-bit little-endian integer of length of crate file.
+ let crate_sz = read_le_u32(&mut f).expect("read crate length");
+ let mut krate_bytes = vec![0; crate_sz as usize];
+ f.read_exact(&mut krate_bytes).expect("read crate data");
+ // Check at end.
+ let current = f.seek(SeekFrom::Current(0)).unwrap();
+ assert_eq!(f.seek(SeekFrom::End(0)).unwrap(), current);
+
+ // Verify the tarball.
+ validate_crate_contents(
+ &krate_bytes[..],
+ expected_crate_name,
+ expected_files,
+ expected_contents,
+ );
+}
+
+/// Checks the contents of a `.crate` file.
+///
+/// - `expected_crate_name` should be something like `foo-0.0.1.crate`.
+/// - `expected_files` should be a complete list of files in the crate
+/// (relative to expected_crate_name).
+/// - `expected_contents` should be a list of `(file_name, contents)` tuples
+/// to validate the contents of the given file. Only the listed files will
+/// be checked (others will be ignored).
+pub fn validate_crate_contents(
+ reader: impl Read,
+ expected_crate_name: &str,
+ expected_files: &[&str],
+ expected_contents: &[(&str, &str)],
+) {
+ let mut rdr = GzDecoder::new(reader);
+ assert_eq!(
+ rdr.header().unwrap().filename().unwrap(),
+ expected_crate_name.as_bytes()
+ );
+ let mut contents = Vec::new();
+ rdr.read_to_end(&mut contents).unwrap();
+ let mut ar = Archive::new(&contents[..]);
+ let files: HashMap<PathBuf, String> = ar
+ .entries()
+ .unwrap()
+ .map(|entry| {
+ let mut entry = entry.unwrap();
+ let name = entry.path().unwrap().into_owned();
+ let mut contents = String::new();
+ entry.read_to_string(&mut contents).unwrap();
+ (name, contents)
+ })
+ .collect();
+ assert!(expected_crate_name.ends_with(".crate"));
+ let base_crate_name = Path::new(&expected_crate_name[..expected_crate_name.len() - 6]);
+ let actual_files: HashSet<PathBuf> = files.keys().cloned().collect();
+ let expected_files: HashSet<PathBuf> = expected_files
+ .iter()
+ .map(|name| base_crate_name.join(name))
+ .collect();
+ let missing: Vec<&PathBuf> = expected_files.difference(&actual_files).collect();
+ let extra: Vec<&PathBuf> = actual_files.difference(&expected_files).collect();
+ if !missing.is_empty() || !extra.is_empty() {
+ panic!(
+ "uploaded archive does not match.\nMissing: {:?}\nExtra: {:?}\n",
+ missing, extra
+ );
+ }
+ if !expected_contents.is_empty() {
+ for (e_file_name, e_file_contents) in expected_contents {
+ let full_e_name = base_crate_name.join(e_file_name);
+ let actual_contents = files
+ .get(&full_e_name)
+ .unwrap_or_else(|| panic!("file `{}` missing in archive", e_file_name));
+ assert_match_exact(e_file_contents, actual_contents);
+ }
+ }
+}
+
+pub(crate) fn create_index_line(
+ name: serde_json::Value,
+ vers: &str,
+ deps: Vec<serde_json::Value>,
+ cksum: &str,
+ features: crate::registry::FeatureMap,
+ yanked: bool,
+ links: Option<String>,
+ v: Option<u32>,
+) -> String {
+ // This emulates what crates.io does to retain backwards compatibility.
+ let (features, features2) = split_index_features(features.clone());
+ let mut json = serde_json::json!({
+ "name": name,
+ "vers": vers,
+ "deps": deps,
+ "cksum": cksum,
+ "features": features,
+ "yanked": yanked,
+ "links": links,
+ });
+ if let Some(f2) = &features2 {
+ json["features2"] = serde_json::json!(f2);
+ json["v"] = serde_json::json!(2);
+ }
+ if let Some(v) = v {
+ json["v"] = serde_json::json!(v);
+ }
+
+ json.to_string()
+}
+
+pub(crate) fn write_to_index(registry_path: &PathBuf, name: &str, line: String, local: bool) {
+ let file = cargo_util::registry::make_dep_path(name, false);
+
+ // Write file/line in the index.
+ let dst = if local {
+ registry_path.join("index").join(&file)
+ } else {
+ registry_path.join(&file)
+ };
+ let prev = fs::read_to_string(&dst).unwrap_or_default();
+ t!(fs::create_dir_all(dst.parent().unwrap()));
+ t!(fs::write(&dst, prev + &line[..] + "\n"));
+
+ // Add the new file to the index.
+ if !local {
+ let repo = t!(git2::Repository::open(&registry_path));
+ let mut index = t!(repo.index());
+ t!(index.add_path(Path::new(&file)));
+ t!(index.write());
+ let id = t!(index.write_tree());
+
+ // Commit this change.
+ let tree = t!(repo.find_tree(id));
+ let sig = t!(repo.signature());
+ let parent = t!(repo.refname_to_id("refs/heads/master"));
+ let parent = t!(repo.find_commit(parent));
+ t!(repo.commit(
+ Some("HEAD"),
+ &sig,
+ &sig,
+ "Another commit",
+ &tree,
+ &[&parent]
+ ));
+ }
+}
+
+fn split_index_features(mut features: FeatureMap) -> (FeatureMap, Option<FeatureMap>) {
+ let mut features2 = FeatureMap::new();
+ for (feat, values) in features.iter_mut() {
+ if values
+ .iter()
+ .any(|value| value.starts_with("dep:") || value.contains("?/"))
+ {
+ let new_values = values.drain(..).collect();
+ features2.insert(feat.clone(), new_values);
+ }
+ }
+ if features2.is_empty() {
+ (features, None)
+ } else {
+ (features, Some(features2))
+ }
+}
diff --git a/crates/cargo-test-support/src/registry.rs b/crates/cargo-test-support/src/registry.rs
new file mode 100644
index 0000000..7b1dc54
--- /dev/null
+++ b/crates/cargo-test-support/src/registry.rs
@@ -0,0 +1,1581 @@
+use crate::git::repo;
+use crate::paths;
+use crate::publish::{create_index_line, write_to_index};
+use cargo_util::paths::append;
+use cargo_util::Sha256;
+use flate2::write::GzEncoder;
+use flate2::Compression;
+use pasetors::keys::{AsymmetricPublicKey, AsymmetricSecretKey};
+use pasetors::paserk::FormatAsPaserk;
+use pasetors::token::UntrustedToken;
+use std::collections::{BTreeMap, HashMap};
+use std::fmt;
+use std::fs::{self, File};
+use std::io::{BufRead, BufReader, Read, Write};
+use std::net::{SocketAddr, TcpListener, TcpStream};
+use std::path::PathBuf;
+use std::thread::{self, JoinHandle};
+use tar::{Builder, Header};
+use time::format_description::well_known::Rfc3339;
+use time::{Duration, OffsetDateTime};
+use url::Url;
+
/// Gets the path to the local index pretending to be crates.io. This is a Git repo
/// initialized with a `config.json` file pointing to `dl_path` for downloads
/// and `api_path` for uploads.
pub fn registry_path() -> PathBuf {
    generate_path("registry")
}
/// Gets the path for local web API uploads. Cargo will place the contents of a web API
/// request here. For example, `api/v1/crates/new` is the result of publishing a crate.
pub fn api_path() -> PathBuf {
    generate_path("api")
}
/// Gets the path where crates can be downloaded using the web API endpoint. Crates
/// should be organized as `{name}/{version}/download` to match the web API
/// endpoint. This is rarely used and must be manually set up.
fn dl_path() -> PathBuf {
    generate_path("dl")
}
/// Gets the alternative-registry version of `registry_path`.
fn alt_registry_path() -> PathBuf {
    generate_path("alternative-registry")
}
/// Gets the alternative-registry version of `registry_url`.
fn alt_registry_url() -> Url {
    generate_url("alternative-registry")
}
/// Gets the alternative-registry version of `dl_path`.
pub fn alt_dl_path() -> PathBuf {
    generate_path("alternative-dl")
}
/// Gets the alternative-registry version of `api_path`.
pub fn alt_api_path() -> PathBuf {
    generate_path("alternative-api")
}
/// Resolves `name` to a directory directly under the per-test root.
fn generate_path(name: &str) -> PathBuf {
    paths::root().join(name)
}
+fn generate_url(name: &str) -> Url {
+ Url::from_file_path(generate_path(name)).ok().unwrap()
+}
+
/// An authorization token for a registry.
#[derive(Clone)]
pub enum Token {
    /// A classic plaintext token, sent as-is by clients.
    Plaintext(String),
    /// An asymmetric secret key plus an optional subject
    /// (see [`Token::rfc_key`] for a PASETO example).
    Keys(String, Option<String>),
}
+
impl Token {
    /// This is a valid PASETO secret key.
    /// This one is already publicly available as part of the text of the RFC so is safe to use for tests.
    ///
    /// Returns a `Token::Keys` with the RFC's example key and a `"sub"` subject.
    pub fn rfc_key() -> Token {
        Token::Keys(
            "k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36"
                .to_string(),
            Some("sub".to_string()),
        )
    }
}
+
/// A builder for initializing registries.
///
/// Configure via the chained methods and finalize with
/// [`RegistryBuilder::build`].
pub struct RegistryBuilder {
    /// If set, configures an alternate registry with the given name.
    alternative: Option<String>,
    /// The authorization token for the registry.
    token: Option<Token>,
    /// If set, the registry requires authorization for all operations.
    auth_required: bool,
    /// If set, serves the index over http.
    http_index: bool,
    /// If set, serves the API over http.
    http_api: bool,
    /// If set, config.json includes 'api'
    api: bool,
    /// Write the token in the configuration.
    configure_token: bool,
    /// Write the registry in configuration.
    configure_registry: bool,
    /// API responders.
    ///
    /// Maps a URL (as registered via `add_responder`) to a closure producing
    /// the response; dispatch happens in `HttpServer` (not in this chunk).
    custom_responders: HashMap<&'static str, Box<dyn Send + Fn(&Request, &HttpServer) -> Response>>,
}
+
/// A test registry created by [`RegistryBuilder::build`].
pub struct TestRegistry {
    /// The HTTP server, if an http index or api was requested.
    server: Option<HttpServerHandle>,
    /// URL of the index (a `file://` or `sparse+http://` URL).
    index_url: Url,
    /// On-disk location of the registry's git repository.
    path: PathBuf,
    /// URL of the web API.
    api_url: Url,
    /// URL crates are downloaded from.
    dl_url: Url,
    /// Token used to authenticate against this registry.
    token: Token,
}
+
impl TestRegistry {
    /// URL of this registry's index.
    pub fn index_url(&self) -> &Url {
        &self.index_url
    }

    /// URL of this registry's web API.
    pub fn api_url(&self) -> &Url {
        &self.api_url
    }

    /// The plaintext authorization token.
    ///
    /// # Panics
    /// Panics if the registry was configured with `Token::Keys` instead.
    pub fn token(&self) -> &str {
        match &self.token {
            Token::Plaintext(s) => s,
            Token::Keys(_, _) => panic!("registry was not configured with a plaintext token"),
        }
    }

    /// The asymmetric secret key.
    ///
    /// # Panics
    /// Panics if the registry was configured with `Token::Plaintext` instead.
    pub fn key(&self) -> &str {
        match &self.token {
            Token::Plaintext(_) => panic!("registry was not configured with a secret key"),
            Token::Keys(s, _) => s,
        }
    }

    /// Shutdown the server thread and wait for it to stop.
    /// `Drop` automatically stops the server, but this additionally
    /// waits for the thread to stop.
    pub fn join(self) {
        if let Some(mut server) = self.server {
            server.stop();
            let handle = server.handle.take().unwrap();
            handle.join().unwrap();
        }
    }
}
+
impl RegistryBuilder {
    /// Creates a builder with the defaults: a crates.io replacement with a
    /// git (file) index, an api, and a generated plaintext token.
    #[must_use]
    pub fn new() -> RegistryBuilder {
        RegistryBuilder {
            alternative: None,
            token: None,
            auth_required: false,
            http_api: false,
            http_index: false,
            api: true,
            configure_registry: true,
            configure_token: true,
            custom_responders: HashMap::new(),
        }
    }

    /// Adds a custom HTTP response for a specific url
    #[must_use]
    pub fn add_responder<R: 'static + Send + Fn(&Request, &HttpServer) -> Response>(
        mut self,
        url: &'static str,
        responder: R,
    ) -> Self {
        self.custom_responders.insert(url, Box::new(responder));
        self
    }

    /// Sets whether or not to initialize as an alternative registry.
    #[must_use]
    pub fn alternative_named(mut self, alt: &str) -> Self {
        self.alternative = Some(alt.to_string());
        self
    }

    /// Sets whether or not to initialize as an alternative registry.
    ///
    /// Uses the default name `"alternative"`.
    #[must_use]
    pub fn alternative(self) -> Self {
        self.alternative_named("alternative")
    }

    /// Prevents placing a token in the configuration
    #[must_use]
    pub fn no_configure_token(mut self) -> Self {
        self.configure_token = false;
        self
    }

    /// Prevents adding the registry to the configuration.
    #[must_use]
    pub fn no_configure_registry(mut self) -> Self {
        self.configure_registry = false;
        self
    }

    /// Sets the token value
    #[must_use]
    pub fn token(mut self, token: Token) -> Self {
        self.token = Some(token);
        self
    }

    /// Sets this registry to require the authentication token for
    /// all operations.
    #[must_use]
    pub fn auth_required(mut self) -> Self {
        self.auth_required = true;
        self
    }

    /// Operate the index over http
    #[must_use]
    pub fn http_index(mut self) -> Self {
        self.http_index = true;
        self
    }

    /// Operate the api over http
    #[must_use]
    pub fn http_api(mut self) -> Self {
        self.http_api = true;
        self
    }

    /// The registry has no api.
    #[must_use]
    pub fn no_api(mut self) -> Self {
        self.api = false;
        self
    }

    /// Initializes the registry.
    #[must_use]
    pub fn build(self) -> TestRegistry {
        let config_path = paths::home().join(".cargo/config");
        t!(fs::create_dir_all(config_path.parent().unwrap()));
        // An alternative registry's directories are distinguished from the
        // default ones by a "<name>-" prefix.
        let prefix = if let Some(alternative) = &self.alternative {
            format!("{alternative}-")
        } else {
            String::new()
        };
        let registry_path = generate_path(&format!("{prefix}registry"));
        let index_url = generate_url(&format!("{prefix}registry"));
        let api_url = generate_url(&format!("{prefix}api"));
        let dl_url = generate_url(&format!("{prefix}dl"));
        let dl_path = generate_path(&format!("{prefix}dl"));
        let api_path = generate_path(&format!("{prefix}api"));
        let token = self
            .token
            .unwrap_or_else(|| Token::Plaintext(format!("{prefix}sekrit")));

        // Only spawn the HTTP server when the index and/or api is served
        // over http; the http URLs then replace the file URLs.
        let (server, index_url, api_url, dl_url) = if !self.http_index && !self.http_api {
            // No need to start the HTTP server.
            (None, index_url, api_url, dl_url)
        } else {
            let server = HttpServer::new(
                registry_path.clone(),
                dl_path,
                api_path.clone(),
                token.clone(),
                self.auth_required,
                self.custom_responders,
            );
            let index_url = if self.http_index {
                server.index_url()
            } else {
                index_url
            };
            let api_url = if self.http_api {
                server.api_url()
            } else {
                api_url
            };
            let dl_url = server.dl_url();
            (Some(server), index_url, api_url, dl_url)
        };

        let registry = TestRegistry {
            api_url,
            index_url,
            server,
            dl_url,
            path: registry_path,
            token,
        };

        // Point cargo at this registry through `.cargo/config`.
        if self.configure_registry {
            if let Some(alternative) = &self.alternative {
                append(
                    &config_path,
                    format!(
                        "
                        [registries.{alternative}]
                        index = '{}'",
                        registry.index_url
                    )
                    .as_bytes(),
                )
                .unwrap();
            } else {
                append(
                    &config_path,
                    format!(
                        "
                        [source.crates-io]
                        replace-with = 'dummy-registry'

                        [registries.dummy-registry]
                        index = '{}'",
                        registry.index_url
                    )
                    .as_bytes(),
                )
                .unwrap();
            }
        }

        // Store credentials so cargo can authenticate against the registry.
        if self.configure_token {
            let credentials = paths::home().join(".cargo/credentials.toml");
            match &registry.token {
                Token::Plaintext(token) => {
                    if let Some(alternative) = &self.alternative {
                        append(
                            &credentials,
                            format!(
                                r#"
                                [registries.{alternative}]
                                token = "{token}"
                                "#
                            )
                            .as_bytes(),
                        )
                        .unwrap();
                    } else {
                        append(
                            &credentials,
                            format!(
                                r#"
                                [registry]
                                token = "{token}"
                                "#
                            )
                            .as_bytes(),
                        )
                        .unwrap();
                    }
                }
                Token::Keys(key, subject) => {
                    let mut out = if let Some(alternative) = &self.alternative {
                        format!("\n[registries.{alternative}]\n")
                    } else {
                        format!("\n[registry]\n")
                    };
                    out += &format!("secret-key = \"{key}\"\n");
                    if let Some(subject) = subject {
                        out += &format!("secret-key-subject = \"{subject}\"\n");
                    }

                    append(&credentials, out.as_bytes()).unwrap();
                }
            }
        }

        // Write the index's `config.json` describing this registry.
        let auth = if self.auth_required {
            r#","auth-required":true"#
        } else {
            ""
        };
        let api = if self.api {
            format!(r#","api":"{}""#, registry.api_url)
        } else {
            String::new()
        };
        // Initialize a new registry.
        repo(&registry.path)
            .file(
                "config.json",
                &format!(r#"{{"dl":"{}"{api}{auth}}}"#, registry.dl_url),
            )
            .build();
        fs::create_dir_all(api_path.join("api/v1/crates")).unwrap();

        registry
    }
}
+
/// A builder for creating a new package in a registry.
///
/// This uses "source replacement" using an automatically generated
/// `.cargo/config` file to ensure that dependencies will use these packages
/// instead of contacting crates.io. See `source-replacement.md` for more
/// details on how source replacement works.
///
/// Call `publish` to finalize and create the package.
///
/// If no files are specified, an empty `lib.rs` file is automatically created.
///
/// The `Cargo.toml` file is automatically generated based on the methods
/// called on `Package` (for example, calling `dep()` will add to the
/// `[dependencies]` automatically). You may also specify a `Cargo.toml` file
/// to override the generated one.
///
/// This supports different registry types:
/// - Regular source replacement that replaces `crates.io` (the default).
/// - A "local registry" which is a subset for vendoring (see
///   `Package::local`).
/// - An "alternative registry" which requires specifying the registry name
///   (see `Package::alternative`).
///
/// This does not support "directory sources". See `directory.rs` for
/// `VendorPackage` which implements directory sources.
///
/// # Example
/// ```
/// // Publish package "a" depending on "b".
/// Package::new("a", "1.0.0")
///     .dep("b", "1.0.0")
///     .file("src/lib.rs", r#"
///         extern crate b;
///         pub fn f() -> i32 { b::f() * 2 }
///     "#)
///     .publish();
///
/// // Publish package "b".
/// Package::new("b", "1.0.0")
///     .file("src/lib.rs", r#"
///         pub fn f() -> i32 { 12 }
///     "#)
///     .publish();
///
/// // Create a project that uses package "a".
/// let p = project()
///     .file("Cargo.toml", r#"
///         [package]
///         name = "foo"
///         version = "0.0.1"
///
///         [dependencies]
///         a = "1.0"
///     "#)
///     .file("src/main.rs", r#"
///         extern crate a;
///         fn main() { println!("{}", a::f()); }
///     "#)
///     .build();
///
/// p.cargo("run").with_stdout("24").run();
/// ```
#[must_use]
pub struct Package {
    name: String,
    vers: String,
    deps: Vec<Dependency>,
    // Files to place in the package tarball.
    files: Vec<PackageFile>,
    // Whether the version is marked yanked in the index.
    yanked: bool,
    features: FeatureMap,
    // `true` for a "local registry" (vendoring subset).
    local: bool,
    // `true` to publish into the alternative registry.
    alternative: bool,
    // Presumably emits deliberately malformed index JSON for error tests;
    // the publishing code is outside this chunk — TODO confirm.
    invalid_json: bool,
    proc_macro: bool,
    links: Option<String>,
    rust_version: Option<String>,
    cargo_features: Vec<String>,
    // Index schema version (the `"v"` field) override.
    v: Option<u32>,
}
+
/// Maps a feature name to the list of features/dependencies it enables.
pub(crate) type FeatureMap = BTreeMap<String, Vec<String>>;

/// A dependency of a [`Package`], recorded in the generated index/manifest.
#[derive(Clone)]
pub struct Dependency {
    name: String,
    // Version requirement string (e.g. "1.0").
    vers: String,
    // Dependency kind; exact values are handled by the publishing code,
    // which is outside this chunk — TODO confirm allowed strings.
    kind: String,
    // Artifact dependency info, if any — TODO confirm tuple semantics
    // against the publishing code (not visible here).
    artifact: Option<(String, Option<String>)>,
    // `[target.'...'.dependencies]` platform, if any.
    target: Option<String>,
    // Features enabled for this dependency.
    features: Vec<String>,
    // Registry this dependency comes from, if not the default.
    registry: Option<String>,
    // Renamed package (`package = "..."`), if any.
    package: Option<String>,
    optional: bool,
}

/// Entry with data that corresponds to [`tar::EntryType`].
#[non_exhaustive]
enum EntryData {
    /// A regular file with the given contents.
    Regular(String),
    /// A symlink pointing at the given target path.
    Symlink(PathBuf),
}

/// A file to be created in a package.
struct PackageFile {
    path: String,
    contents: EntryData,
    /// The Unix mode for the file. Note that when extracted on Windows, this
    /// is mostly ignored since it doesn't have the same style of permissions.
    mode: u32,
    /// If `true`, the file is created in the root of the tarfile, used for
    /// testing invalid packages.
    extra: bool,
}

/// Default Unix permissions (rw-r--r--) for files in a package.
const DEFAULT_MODE: u32 = 0o644;
+
/// Initializes the on-disk registry and sets up the config so that crates.io
/// is replaced with the one on disk.
///
/// Shorthand for `RegistryBuilder::new().build()`.
pub fn init() -> TestRegistry {
    RegistryBuilder::new().build()
}
+
/// Variant of `init` that initializes the "alternative" registry and crates.io
/// replacement.
pub fn alt_init() -> TestRegistry {
    // Also set up the crates.io replacement registry.
    init();
    RegistryBuilder::new().alternative().build()
}
+
/// Handle to a spawned [`HttpServer`]; asks the server to stop when dropped.
pub struct HttpServerHandle {
    /// Address the server is listening on.
    addr: SocketAddr,
    /// Join handle for the server thread (`None` once taken, e.g. by
    /// `TestRegistry::join`).
    handle: Option<JoinHandle<()>>,
}
+
+impl HttpServerHandle {
+ pub fn index_url(&self) -> Url {
+ Url::parse(&format!("sparse+http://{}/index/", self.addr.to_string())).unwrap()
+ }
+
+ pub fn api_url(&self) -> Url {
+ Url::parse(&format!("http://{}/", self.addr.to_string())).unwrap()
+ }
+
+ pub fn dl_url(&self) -> Url {
+ Url::parse(&format!("http://{}/dl", self.addr.to_string())).unwrap()
+ }
+
+ fn stop(&self) {
+ if let Ok(mut stream) = TcpStream::connect(self.addr) {
+ // shutdown the server
+ let _ = stream.write_all(b"stop");
+ let _ = stream.flush();
+ }
+ }
+}
+
impl Drop for HttpServerHandle {
    fn drop(&mut self) {
        // Best-effort shutdown request; does not wait for the thread
        // (use `TestRegistry::join` to wait).
        self.stop();
    }
}
+
/// Request to the test http server
#[derive(Clone)]
pub struct Request {
    /// Full URL of the request.
    pub url: Url,
    /// HTTP method, lower-cased by the server's request-line parser.
    pub method: String,
    /// Raw request body, if one was sent.
    pub body: Option<Vec<u8>>,
    /// Value of the `Authorization` header, if present.
    pub authorization: Option<String>,
    /// Value of the `If-Modified-Since` header, if present.
    pub if_modified_since: Option<String>,
    /// Value of the `If-None-Match` header, if present.
    pub if_none_match: Option<String>,
}
+
impl fmt::Debug for Request {
    // Hand-written (rather than derived) so the potentially huge body can
    // be omitted.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // body is not included as it can produce long debug outputs
        f.debug_struct("Request")
            .field("url", &self.url)
            .field("method", &self.method)
            .field("authorization", &self.authorization)
            .field("if_modified_since", &self.if_modified_since)
            .field("if_none_match", &self.if_none_match)
            .finish()
    }
}
+
/// Response from the test http server
pub struct Response {
    /// HTTP status code (e.g. 200).
    pub code: u32,
    /// Additional headers to include in the response.
    pub headers: Vec<String>,
    /// Response body bytes.
    pub body: Vec<u8>,
}
+
/// State of the test HTTP server; also passed to custom responders.
pub struct HttpServer {
    /// Socket the server accepts connections on.
    listener: TcpListener,
    /// On-disk path of the registry index git repo.
    registry_path: PathBuf,
    /// On-disk path crates are downloaded from.
    dl_path: PathBuf,
    /// On-disk path where web API results (e.g. uploads) are stored.
    api_path: PathBuf,
    /// Cached local address of `listener`.
    addr: SocketAddr,
    /// Token clients are expected to authenticate with.
    token: Token,
    /// If set, all operations require authorization.
    auth_required: bool,
    /// Custom responders registered via `RegistryBuilder::add_responder`.
    custom_responders: HashMap<&'static str, Box<dyn Send + Fn(&Request, &HttpServer) -> Response>>,
}
+
/// A helper struct that collects the arguments for [HttpServer::check_authorized].
/// Based on looking at the request, these are the fields that the authentication header should attest to.
pub struct Mutation<'a> {
    /// The mutating operation being performed — see `check_authorized`
    /// (outside this chunk) for the exact values used.
    pub mutation: &'a str,
    /// Crate name the mutation applies to, if any.
    pub name: Option<&'a str>,
    /// Crate version, if any.
    pub vers: Option<&'a str>,
    /// Checksum of the crate file, if any.
    pub cksum: Option<&'a str>,
}
+
+impl HttpServer {
    /// Creates the server and starts serving it on a background thread.
    ///
    /// Binds to an OS-assigned port on localhost so concurrent tests do not
    /// collide. The returned handle stops the server when dropped.
    pub fn new(
        registry_path: PathBuf,
        dl_path: PathBuf,
        api_path: PathBuf,
        token: Token,
        auth_required: bool,
        api_responders: HashMap<
            &'static str,
            Box<dyn Send + Fn(&Request, &HttpServer) -> Response>,
        >,
    ) -> HttpServerHandle {
        let listener = TcpListener::bind("127.0.0.1:0").unwrap();
        let addr = listener.local_addr().unwrap();
        let server = HttpServer {
            listener,
            registry_path,
            dl_path,
            api_path,
            addr,
            token,
            auth_required,
            custom_responders: api_responders,
        };
        // The thread exits when `stop` sends the "stop" sentinel.
        let handle = Some(thread::spawn(move || server.start()));
        HttpServerHandle { addr, handle }
    }
+
    /// Main server loop: accepts and serves one connection at a time until a
    /// bare "stop" line is received (sent by `HttpServerHandle::stop`).
    fn start(&self) {
        // Line buffer reused for the request line and each header line.
        let mut line = String::new();
        'server: loop {
            let (socket, _) = self.listener.accept().unwrap();
            let mut buf = BufReader::new(socket);
            line.clear();
            if buf.read_line(&mut line).unwrap() == 0 {
                // Connection terminated.
                continue;
            }
            // Read the "GET path HTTP/1.1" line.
            let mut parts = line.split_ascii_whitespace();
            let method = parts.next().unwrap().to_ascii_lowercase();
            if method == "stop" {
                // Shutdown the server.
                return;
            }
            // Rebuild an absolute URL from the path in the request line.
            let addr = self.listener.local_addr().unwrap();
            let url = format!(
                "http://{}/{}",
                addr,
                parts.next().unwrap().trim_start_matches('/')
            );
            let url = Url::parse(&url).unwrap();

            // Grab headers we care about.
            let mut if_modified_since = None;
            let mut if_none_match = None;
            let mut authorization = None;
            let mut content_len = None;
            loop {
                line.clear();
                if buf.read_line(&mut line).unwrap() == 0 {
                    // Client hung up mid-headers; wait for the next connection.
                    continue 'server;
                }
                if line == "\r\n" {
                    // End of headers.
                    line.clear();
                    break;
                }
                let (name, value) = line.split_once(':').unwrap();
                let name = name.trim().to_ascii_lowercase();
                let value = value.trim().to_string();
                match name.as_str() {
                    "if-modified-since" => if_modified_since = Some(value),
                    "if-none-match" => if_none_match = Some(value),
                    "authorization" => authorization = Some(value),
                    "content-length" => content_len = Some(value),
                    _ => {}
                }
            }

            // Only read a body when Content-Length was provided.
            let mut body = None;
            if let Some(con_len) = content_len {
                let len = con_len.parse::<u64>().unwrap();
                let mut content = vec![0u8; len as usize];
                buf.read_exact(&mut content).unwrap();
                body = Some(content)
            }

            let req = Request {
                authorization,
                if_modified_since,
                if_none_match,
                method,
                url,
                body,
            };
            // Log every request to help debug failing tests.
            println!("req: {:#?}", req);
            let response = self.route(&req);
            // Write a minimal HTTP/1.1 response; Content-Length is always set.
            let buf = buf.get_mut();
            write!(buf, "HTTP/1.1 {}\r\n", response.code).unwrap();
            write!(buf, "Content-Length: {}\r\n", response.body.len()).unwrap();
            for header in response.headers {
                write!(buf, "{}\r\n", header).unwrap();
            }
            write!(buf, "\r\n").unwrap();
            buf.write_all(&response.body).unwrap();
            buf.flush().unwrap();
        }
    }
+
    /// Checks that the request's `Authorization` header is valid for the
    /// configured [`Token`]; for asymmetric tokens this validates the PASETO
    /// and its claims against the request (see the RFC link below).
    ///
    /// `mutation` is `Some` for operations that modify the registry
    /// (publish/yank/unyank/owners); reads pass `None`.
    fn check_authorized(&self, req: &Request, mutation: Option<Mutation>) -> bool {
        // Authorization is only enforced for mutations, or for every request
        // when `auth_required` is set.
        let (private_key, private_key_subject) = if mutation.is_some() || self.auth_required {
            match &self.token {
                // Plaintext tokens are a simple byte-for-byte comparison.
                Token::Plaintext(token) => return Some(token) == req.authorization.as_ref(),
                Token::Keys(private_key, private_key_subject) => {
                    (private_key.as_str(), private_key_subject)
                }
            }
        } else {
            // Unauthenticated reads must not send a token at all.
            assert!(req.authorization.is_none(), "unexpected token");
            return true;
        };

        // Shorthand: unwrap an Option, or reject the request with `false`.
        macro_rules! t {
            ($e:expr) => {
                match $e {
                    Some(e) => e,
                    None => return false,
                }
            };
        }

        // Derive the public key and its PASERK id from the private key.
        let secret: AsymmetricSecretKey<pasetors::version3::V3> = private_key.try_into().unwrap();
        let public: AsymmetricPublicKey<pasetors::version3::V3> = (&secret).try_into().unwrap();
        let pub_key_id: pasetors::paserk::Id = (&public).into();
        let mut paserk_pub_key_id = String::new();
        FormatAsPaserk::fmt(&pub_key_id, &mut paserk_pub_key_id).unwrap();
        // https://github.com/rust-lang/rfcs/blob/master/text/3231-cargo-asymmetric-tokens.md#how-the-registry-server-will-validate-an-asymmetric-token

        // - The PASETO is in v3.public format.
        let authorization = t!(&req.authorization);
        let untrusted_token = t!(
            UntrustedToken::<pasetors::Public, pasetors::version3::V3>::try_from(authorization)
                .ok()
        );

        // - The PASETO validates using the public key it looked up based on the key ID.
        #[derive(serde::Deserialize, Debug)]
        struct Footer<'a> {
            url: &'a str,
            kip: &'a str,
        }
        let footer: Footer = t!(serde_json::from_slice(untrusted_token.untrusted_footer()).ok());
        if footer.kip != paserk_pub_key_id {
            return false;
        }
        let trusted_token =
            t!(
                pasetors::version3::PublicToken::verify(&public, &untrusted_token, None, None,)
                    .ok()
            );

        // - The URL matches the registry base URL
        if footer.url != "https://github.com/rust-lang/crates.io-index"
            && footer.url != &format!("sparse+http://{}/index/", self.addr.to_string())
        {
            dbg!(footer.url);
            return false;
        }

        // - The PASETO is still within its valid time period.
        #[derive(serde::Deserialize)]
        struct Message<'a> {
            iat: &'a str,
            sub: Option<&'a str>,
            mutation: Option<&'a str>,
            name: Option<&'a str>,
            vers: Option<&'a str>,
            cksum: Option<&'a str>,
            _challenge: Option<&'a str>, // todo: PASETO with challenges
            v: Option<u8>,
        }
        let message: Message = t!(serde_json::from_str(trusted_token.payload()).ok());
        let token_time = t!(OffsetDateTime::parse(message.iat, &Rfc3339).ok());
        let now = OffsetDateTime::now_utc();
        if (now - token_time) > Duration::MINUTE {
            return false;
        }
        // - The subject claim must match the configured key subject.
        if private_key_subject.as_deref() != message.sub {
            dbg!(message.sub);
            return false;
        }
        // - If the claim v is set, that it has the value of 1.
        if let Some(v) = message.v {
            if v != 1 {
                dbg!(message.v);
                return false;
            }
        }
        // - If the server issues challenges, that the challenge has not yet been answered.
        // todo: PASETO with challenges
        // - If the operation is a mutation:
        if let Some(mutation) = mutation {
            // - That the operation matches the mutation field and is one of publish, yank, or unyank.
            if message.mutation != Some(mutation.mutation) {
                dbg!(message.mutation);
                return false;
            }
            // - That the package, and version match the request.
            if message.name != mutation.name {
                dbg!(message.name);
                return false;
            }
            if message.vers != mutation.vers {
                dbg!(message.vers);
                return false;
            }
            // - If the mutation is publish, that the version has not already been published, and that the hash matches the request.
            if mutation.mutation == "publish" {
                if message.cksum != mutation.cksum {
                    dbg!(message.cksum);
                    return false;
                }
            }
        } else {
            // - If the operation is a read, that the mutation field is not set.
            if message.mutation.is_some()
                || message.name.is_some()
                || message.vers.is_some()
                || message.cksum.is_some()
            {
                return false;
            }
        }
        true
    }
+
+ /// Route the request
+ fn route(&self, req: &Request) -> Response {
+ // Check for custom responder
+ if let Some(responder) = self.custom_responders.get(req.url.path()) {
+ return responder(&req, self);
+ }
+ let path: Vec<_> = req.url.path()[1..].split('/').collect();
+ match (req.method.as_str(), path.as_slice()) {
+ ("get", ["index", ..]) => {
+ if !self.check_authorized(req, None) {
+ self.unauthorized(req)
+ } else {
+ self.index(&req)
+ }
+ }
+ ("get", ["dl", ..]) => {
+ if !self.check_authorized(req, None) {
+ self.unauthorized(req)
+ } else {
+ self.dl(&req)
+ }
+ }
+ // publish
+ ("put", ["api", "v1", "crates", "new"]) => self.check_authorized_publish(req),
+ // The remainder of the operators in the test framework do nothing other than responding 'ok'.
+ //
+ // Note: We don't need to support anything real here because there are no tests that
+ // currently require anything other than publishing via the http api.
+
+ // yank / unyank
+ ("delete" | "put", ["api", "v1", "crates", crate_name, version, mutation]) => {
+ if !self.check_authorized(
+ req,
+ Some(Mutation {
+ mutation,
+ name: Some(crate_name),
+ vers: Some(version),
+ cksum: None,
+ }),
+ ) {
+ self.unauthorized(req)
+ } else {
+ self.ok(&req)
+ }
+ }
+ // owners
+ ("get" | "put" | "delete", ["api", "v1", "crates", crate_name, "owners"]) => {
+ if !self.check_authorized(
+ req,
+ Some(Mutation {
+ mutation: "owners",
+ name: Some(crate_name),
+ vers: None,
+ cksum: None,
+ }),
+ ) {
+ self.unauthorized(req)
+ } else {
+ self.ok(&req)
+ }
+ }
+ _ => self.not_found(&req),
+ }
+ }
+
+ /// Unauthorized response
+ pub fn unauthorized(&self, _req: &Request) -> Response {
+ Response {
+ code: 401,
+ headers: vec![
+ r#"WWW-Authenticate: Cargo login_url="https://test-registry-login/me""#.to_string(),
+ ],
+ body: b"Unauthorized message from server.".to_vec(),
+ }
+ }
+
+ /// Not found response
+ pub fn not_found(&self, _req: &Request) -> Response {
+ Response {
+ code: 404,
+ headers: vec![],
+ body: b"not found".to_vec(),
+ }
+ }
+
+ /// Respond OK without doing anything
+ pub fn ok(&self, _req: &Request) -> Response {
+ Response {
+ code: 200,
+ headers: vec![],
+ body: br#"{"ok": true, "msg": "completed!"}"#.to_vec(),
+ }
+ }
+
+ /// Return an internal server error (HTTP 500)
+ pub fn internal_server_error(&self, _req: &Request) -> Response {
+ Response {
+ code: 500,
+ headers: vec![],
+ body: br#"internal server error"#.to_vec(),
+ }
+ }
+
+ /// Serve the download endpoint
+ pub fn dl(&self, req: &Request) -> Response {
+ let file = self
+ .dl_path
+ .join(req.url.path().strip_prefix("/dl/").unwrap());
+ println!("{}", file.display());
+ if !file.exists() {
+ return self.not_found(req);
+ }
+ return Response {
+ body: fs::read(&file).unwrap(),
+ code: 200,
+ headers: vec![],
+ };
+ }
+
+ /// Serve the registry index
+ pub fn index(&self, req: &Request) -> Response {
+ let file = self
+ .registry_path
+ .join(req.url.path().strip_prefix("/index/").unwrap());
+ if !file.exists() {
+ return self.not_found(req);
+ } else {
+ // Now grab info about the file.
+ let data = fs::read(&file).unwrap();
+ let etag = Sha256::new().update(&data).finish_hex();
+ let last_modified = format!("{:?}", file.metadata().unwrap().modified().unwrap());
+
+ // Start to construct our response:
+ let mut any_match = false;
+ let mut all_match = true;
+ if let Some(expected) = &req.if_none_match {
+ if &etag != expected {
+ all_match = false;
+ } else {
+ any_match = true;
+ }
+ }
+ if let Some(expected) = &req.if_modified_since {
+ // NOTE: Equality comparison is good enough for tests.
+ if &last_modified != expected {
+ all_match = false;
+ } else {
+ any_match = true;
+ }
+ }
+
+ if any_match && all_match {
+ return Response {
+ body: Vec::new(),
+ code: 304,
+ headers: vec![],
+ };
+ } else {
+ return Response {
+ body: data,
+ code: 200,
+ headers: vec![
+ format!("ETag: \"{}\"", etag),
+ format!("Last-Modified: {}", last_modified),
+ ],
+ };
+ }
+ }
+ }
+
    /// Handles `PUT /api/v1/crates/new` (cargo publish).
    ///
    /// Records the raw request body under `api_path`, validates the token
    /// against the parsed name/version/checksum, writes the `.crate` file
    /// into `dl_path`, and appends an entry to the index in `registry_path`.
    pub fn check_authorized_publish(&self, req: &Request) -> Response {
        if let Some(body) = &req.body {
            // Mimic the publish behavior for local registries by writing out the request
            // so tests can verify publishes made to either registry type.
            let path = self.api_path.join("api/v1/crates/new");
            t!(fs::create_dir_all(path.parent().unwrap()));
            t!(fs::write(&path, body));

            // The body is cargo's publish wire format: a little-endian u32
            // length followed by the JSON metadata, then another u32 length
            // followed by the `.crate` tarball bytes.
            // Get the metadata of the package
            let (len, remaining) = body.split_at(4);
            let json_len = u32::from_le_bytes(len.try_into().unwrap());
            let (json, remaining) = remaining.split_at(json_len as usize);
            let new_crate = serde_json::from_slice::<crates_io::NewCrate>(json).unwrap();
            // Get the `.crate` file
            let (len, remaining) = remaining.split_at(4);
            let file_len = u32::from_le_bytes(len.try_into().unwrap());
            let (file, _remaining) = remaining.split_at(file_len as usize);
            let file_cksum = cksum(&file);

            // Publishing requires a token attesting to this exact name,
            // version, and checksum.
            if !self.check_authorized(
                req,
                Some(Mutation {
                    mutation: "publish",
                    name: Some(&new_crate.name),
                    vers: Some(&new_crate.vers),
                    cksum: Some(&file_cksum),
                }),
            ) {
                return self.unauthorized(req);
            }

            // Write the `.crate`
            let dst = self
                .dl_path
                .join(&new_crate.name)
                .join(&new_crate.vers)
                .join("download");
            t!(fs::create_dir_all(dst.parent().unwrap()));
            t!(fs::write(&dst, file));

            // Convert the request's dependency metadata to index-format JSON.
            let deps = new_crate
                .deps
                .iter()
                .map(|dep| {
                    // A renamed dependency records the real crate under `package`.
                    let (name, package) = match &dep.explicit_name_in_toml {
                        Some(explicit) => (explicit.to_string(), Some(dep.name.to_string())),
                        None => (dep.name.to_string(), None),
                    };
                    serde_json::json!({
                        "name": name,
                        "req": dep.version_req,
                        "features": dep.features,
                        "default_features": true,
                        "target": dep.target,
                        "optional": dep.optional,
                        "kind": dep.kind,
                        "registry": dep.registry,
                        "package": package,
                    })
                })
                .collect::<Vec<_>>();

            let line = create_index_line(
                serde_json::json!(new_crate.name),
                &new_crate.vers,
                deps,
                &file_cksum,
                new_crate.features,
                false,
                new_crate.links,
                None,
            );

            write_to_index(&self.registry_path, &new_crate.name, line, false);

            self.ok(&req)
        } else {
            // Publish requests must carry a body.
            Response {
                code: 400,
                headers: vec![],
                body: b"The request was missing a body".to_vec(),
            }
        }
    }
+}
+
+impl Package {
    /// Creates a new package builder.
    /// Call `publish()` to finalize and build the package.
    pub fn new(name: &str, vers: &str) -> Package {
        // Lazily initialize the test registry on first use, detected by the
        // missing cargo config in the test home directory.
        let config = paths::home().join(".cargo/config");
        if !config.exists() {
            init();
        }
        // All flags default to off: a remote (non-local), non-alternative,
        // non-yanked, non-proc-macro package with no files or deps.
        Package {
            name: name.to_string(),
            vers: vers.to_string(),
            deps: Vec::new(),
            files: Vec::new(),
            yanked: false,
            features: BTreeMap::new(),
            local: false,
            alternative: false,
            invalid_json: false,
            proc_macro: false,
            links: None,
            rust_version: None,
            cargo_features: Vec::new(),
            v: None,
        }
    }
+
    /// Call with `true` to publish in a "local registry".
    ///
    /// See `source-replacement.html#local-registry-sources` for more details
    /// on local registries. See `local_registry.rs` for the tests that use
    /// this.
    ///
    /// Defaults to `false`.
    pub fn local(&mut self, local: bool) -> &mut Package {
        self.local = local;
        self
    }
+
    /// Call with `true` to publish in an "alternative registry".
    ///
    /// The name of the alternative registry is called "alternative".
    ///
    /// See `src/doc/src/reference/registries.md` for more details on
    /// alternative registries. See `alt_registry.rs` for the tests that use
    /// this.
    ///
    /// Defaults to `false`.
    pub fn alternative(&mut self, alternative: bool) -> &mut Package {
        self.alternative = alternative;
        self
    }
+
    /// Adds a file to the package with the default mode.
    ///
    /// The file is rooted under the `$name-$version/` directory in the
    /// resulting archive.
    pub fn file(&mut self, name: &str, contents: &str) -> &mut Package {
        self.file_with_mode(name, DEFAULT_MODE, contents)
    }
+
    /// Adds a file with a specific Unix mode.
    pub fn file_with_mode(&mut self, path: &str, mode: u32, contents: &str) -> &mut Package {
        // `extra: false` means the path is prefixed with `$name-$version/`
        // when the archive is built (see `make_archive`).
        self.files.push(PackageFile {
            path: path.to_string(),
            contents: EntryData::Regular(contents.into()),
            mode,
            extra: false,
        });
        self
    }
+
    /// Adds a symlink to a path to the package.
    ///
    /// `dst` is the entry path inside the package; `src` is the path the
    /// symlink points at.
    pub fn symlink(&mut self, dst: &str, src: &str) -> &mut Package {
        self.files.push(PackageFile {
            path: dst.to_string(),
            contents: EntryData::Symlink(src.into()),
            mode: DEFAULT_MODE,
            extra: false,
        });
        self
    }
+
    /// Adds an "extra" file that is not rooted within the package.
    ///
    /// Normal files are automatically placed within a directory named
    /// `$PACKAGE-$VERSION`. This allows you to override that behavior,
    /// typically for testing invalid behavior.
    pub fn extra_file(&mut self, path: &str, contents: &str) -> &mut Package {
        // `extra: true` makes `make_archive` use the literal path via
        // `append_raw` instead of the `$name-$version/` prefix.
        self.files.push(PackageFile {
            path: path.to_string(),
            contents: EntryData::Regular(contents.to_string()),
            mode: DEFAULT_MODE,
            extra: true,
        });
        self
    }
+
    /// Adds a normal dependency. Example:
    /// ```
    /// [dependencies]
    /// foo = {version = "1.0"}
    /// ```
    pub fn dep(&mut self, name: &str, vers: &str) -> &mut Package {
        self.add_dep(&Dependency::new(name, vers))
    }
+
    /// Adds a dependency with the given feature. Example:
    /// ```
    /// [dependencies]
    /// foo = {version = "1.0", "features": ["feat1", "feat2"]}
    /// ```
    pub fn feature_dep(&mut self, name: &str, vers: &str, features: &[&str]) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).enable_features(features))
    }
+
    /// Adds a platform-specific dependency. Example:
    /// ```
    /// [target.'cfg(windows)'.dependencies]
    /// foo = {version = "1.0"}
    /// ```
    pub fn target_dep(&mut self, name: &str, vers: &str, target: &str) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).target(target))
    }
+
    /// Adds a dependency to the alternative registry.
    ///
    /// "alternative" is the only registry name the test framework supports.
    pub fn registry_dep(&mut self, name: &str, vers: &str) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).registry("alternative"))
    }
+
    /// Adds a dev-dependency. Example:
    /// ```
    /// [dev-dependencies]
    /// foo = {version = "1.0"}
    /// ```
    pub fn dev_dep(&mut self, name: &str, vers: &str) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).dev())
    }
+
    /// Adds a build-dependency. Example:
    /// ```
    /// [build-dependencies]
    /// foo = {version = "1.0"}
    /// ```
    pub fn build_dep(&mut self, name: &str, vers: &str) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).build())
    }
+
    /// Adds the given dependency to this package.
    ///
    /// Prefer the convenience wrappers (`dep`, `dev_dep`, `build_dep`, ...)
    /// unless a fully customized [`Dependency`] is needed.
    pub fn add_dep(&mut self, dep: &Dependency) -> &mut Package {
        self.deps.push(dep.clone());
        self
    }
+
    /// Specifies whether or not the package is "yanked".
    ///
    /// The flag is recorded in the index line written by `publish`.
    pub fn yanked(&mut self, yanked: bool) -> &mut Package {
        self.yanked = yanked;
        self
    }
+
    /// Specifies whether or not this is a proc macro.
    ///
    /// When set, the generated manifest contains `[lib] proc-macro = true`.
    pub fn proc_macro(&mut self, proc_macro: bool) -> &mut Package {
        self.proc_macro = proc_macro;
        self
    }
+
    /// Adds an entry in the `[features]` section.
    ///
    /// `deps` is the list of strings the feature maps to; it is stored
    /// verbatim and emitted into the index entry by `publish`.
    pub fn feature(&mut self, name: &str, deps: &[&str]) -> &mut Package {
        let deps = deps.iter().map(|s| s.to_string()).collect();
        self.features.insert(name.to_string(), deps);
        self
    }
+
    /// Specify a minimal Rust version.
    ///
    /// Emitted as `rust-version` in the generated `Cargo.toml`.
    pub fn rust_version(&mut self, rust_version: &str) -> &mut Package {
        self.rust_version = Some(rust_version.into());
        self
    }
+
    /// Causes the JSON line emitted in the index to be invalid, presumably
    /// causing Cargo to skip over this version.
    ///
    /// (`publish` writes a numeric `name` field instead of a string.)
    pub fn invalid_json(&mut self, invalid: bool) -> &mut Package {
        self.invalid_json = invalid;
        self
    }
+
    /// Sets the `links` value recorded in the package's index entry.
    pub fn links(&mut self, links: &str) -> &mut Package {
        self.links = Some(links.to_string());
        self
    }
+
    /// Adds an entry to the `cargo-features` list in the generated
    /// `Cargo.toml`.
    pub fn cargo_feature(&mut self, feature: &str) -> &mut Package {
        self.cargo_features.push(feature.to_owned());
        self
    }
+
    /// Sets the index schema version for this package.
    ///
    /// Defaults to `None`.
    ///
    /// See `cargo::sources::registry::RegistryPackage` for more information.
    pub fn schema_version(&mut self, version: u32) -> &mut Package {
        self.v = Some(version);
        self
    }
+
    /// Creates the package and place it in the registry.
    ///
    /// This does not actually use Cargo's publishing system, but instead
    /// manually creates the entry in the registry on the filesystem.
    ///
    /// Returns the checksum for the package.
    pub fn publish(&self) -> String {
        // Build the `.crate` archive first; its bytes determine the checksum.
        self.make_archive();

        // Figure out what we're going to write into the index.
        let deps = self
            .deps
            .iter()
            .map(|dep| {
                // In the index, the `registry` is null if it is from the same registry.
                // In Cargo.toml, it is None if it is from crates.io.
                let registry_url = match (self.alternative, dep.registry.as_deref()) {
                    (false, None) => None,
                    (false, Some("alternative")) => Some(alt_registry_url().to_string()),
                    (true, None) => {
                        Some("https://github.com/rust-lang/crates.io-index".to_string())
                    }
                    (true, Some("alternative")) => None,
                    _ => panic!("registry_dep currently only supports `alternative`"),
                };
                serde_json::json!({
                    "name": dep.name,
                    "req": dep.vers,
                    "features": dep.features,
                    "default_features": true,
                    "target": dep.target,
                    "artifact": dep.artifact,
                    "optional": dep.optional,
                    "kind": dep.kind,
                    "registry": registry_url,
                    "package": dep.package,
                })
            })
            .collect::<Vec<_>>();
        // Checksum of the archive written by `make_archive` above.
        let cksum = {
            let c = t!(fs::read(&self.archive_dst()));
            cksum(&c)
        };
        // A numeric name makes the index line unparsable (see `invalid_json`).
        let name = if self.invalid_json {
            serde_json::json!(1)
        } else {
            serde_json::json!(self.name)
        };
        let line = create_index_line(
            name,
            &self.vers,
            deps,
            &cksum,
            self.features.clone(),
            self.yanked,
            self.links.clone(),
            self.v,
        );

        let registry_path = if self.alternative {
            alt_registry_path()
        } else {
            registry_path()
        };

        write_to_index(&registry_path, &self.name, line, self.local);

        cksum
    }
+
    /// Builds the gzip-compressed `.crate` tar archive at `archive_dst()`.
    fn make_archive(&self) {
        let dst = self.archive_dst();
        t!(fs::create_dir_all(dst.parent().unwrap()));
        let f = t!(File::create(&dst));
        let mut a = Builder::new(GzEncoder::new(f, Compression::default()));

        // Auto-generate a manifest unless the test supplied its own.
        if !self
            .files
            .iter()
            .any(|PackageFile { path, .. }| path == "Cargo.toml")
        {
            self.append_manifest(&mut a);
        }
        // An empty package still gets an empty lib target so it compiles.
        if self.files.is_empty() {
            self.append(
                &mut a,
                "src/lib.rs",
                DEFAULT_MODE,
                &EntryData::Regular("".into()),
            );
        } else {
            for PackageFile {
                path,
                contents,
                mode,
                extra,
            } in &self.files
            {
                if *extra {
                    // "extra" files keep their literal path, outside the
                    // `$name-$version/` directory.
                    self.append_raw(&mut a, path, *mode, contents);
                } else {
                    self.append(&mut a, path, *mode, contents);
                }
            }
        }
    }
+
+ fn append_manifest<W: Write>(&self, ar: &mut Builder<W>) {
+ let mut manifest = String::new();
+
+ if !self.cargo_features.is_empty() {
+ let mut features = String::new();
+ serde::Serialize::serialize(
+ &self.cargo_features,
+ toml::ser::ValueSerializer::new(&mut features),
+ )
+ .unwrap();
+ manifest.push_str(&format!("cargo-features = {}\n\n", features));
+ }
+
+ manifest.push_str(&format!(
+ r#"
+ [package]
+ name = "{}"
+ version = "{}"
+ authors = []
+ "#,
+ self.name, self.vers
+ ));
+
+ if let Some(version) = &self.rust_version {
+ manifest.push_str(&format!("rust-version = \"{}\"", version));
+ }
+
+ for dep in self.deps.iter() {
+ let target = match dep.target {
+ None => String::new(),
+ Some(ref s) => format!("target.'{}'.", s),
+ };
+ let kind = match &dep.kind[..] {
+ "build" => "build-",
+ "dev" => "dev-",
+ _ => "",
+ };
+ manifest.push_str(&format!(
+ r#"
+ [{}{}dependencies.{}]
+ version = "{}"
+ "#,
+ target, kind, dep.name, dep.vers
+ ));
+ if let Some((artifact, target)) = &dep.artifact {
+ manifest.push_str(&format!("artifact = \"{}\"\n", artifact));
+ if let Some(target) = &target {
+ manifest.push_str(&format!("target = \"{}\"\n", target))
+ }
+ }
+ if let Some(registry) = &dep.registry {
+ assert_eq!(registry, "alternative");
+ manifest.push_str(&format!("registry-index = \"{}\"", alt_registry_url()));
+ }
+ }
+ if self.proc_macro {
+ manifest.push_str("[lib]\nproc-macro = true\n");
+ }
+
+ self.append(
+ ar,
+ "Cargo.toml",
+ DEFAULT_MODE,
+ &EntryData::Regular(manifest.into()),
+ );
+ }
+
    /// Appends a file under the conventional `$name-$version/` directory.
    fn append<W: Write>(&self, ar: &mut Builder<W>, file: &str, mode: u32, contents: &EntryData) {
        self.append_raw(
            ar,
            &format!("{}-{}/{}", self.name, self.vers, file),
            mode,
            contents,
        );
    }
+
    /// Appends an archive entry at the exact `path` given.
    ///
    /// Regular entries carry `contents`; symlinks get an empty body with the
    /// link target recorded in the tar header.
    fn append_raw<W: Write>(
        &self,
        ar: &mut Builder<W>,
        path: &str,
        mode: u32,
        contents: &EntryData,
    ) {
        let mut header = Header::new_ustar();
        let contents = match contents {
            EntryData::Regular(contents) => contents.as_str(),
            EntryData::Symlink(src) => {
                header.set_entry_type(tar::EntryType::Symlink);
                t!(header.set_link_name(src));
                "" // Symlink has no contents.
            }
        };
        header.set_size(contents.len() as u64);
        t!(header.set_path(path));
        header.set_mode(mode);
        header.set_cksum();
        t!(ar.append(&header, contents.as_bytes()));
    }
+
+ /// Returns the path to the compressed package file.
+ pub fn archive_dst(&self) -> PathBuf {
+ if self.local {
+ registry_path().join(format!("{}-{}.crate", self.name, self.vers))
+ } else if self.alternative {
+ alt_dl_path()
+ .join(&self.name)
+ .join(&self.vers)
+ .join("download")
+ } else {
+ dl_path().join(&self.name).join(&self.vers).join("download")
+ }
+ }
+}
+
/// Returns the SHA-256 checksum of `s` as a hex string, the format used for
/// checksums in registry index entries.
pub fn cksum(s: &[u8]) -> String {
    Sha256::new().update(s).finish_hex()
}
+
+impl Dependency {
    /// Creates a new dependency with the given name and version requirement.
    ///
    /// Defaults to a normal (`[dependencies]`) entry with no features,
    /// target, registry, package rename, or artifact, and not optional.
    pub fn new(name: &str, vers: &str) -> Dependency {
        Dependency {
            name: name.to_string(),
            vers: vers.to_string(),
            kind: "normal".to_string(),
            artifact: None,
            target: None,
            features: Vec::new(),
            package: None,
            optional: false,
            registry: None,
        }
    }
+
    /// Changes this to `[build-dependencies]`.
    pub fn build(&mut self) -> &mut Self {
        // "build" maps to the `build-` section prefix in the manifest.
        self.kind = "build".to_string();
        self
    }
+
    /// Changes this to `[dev-dependencies]`.
    pub fn dev(&mut self) -> &mut Self {
        // "dev" maps to the `dev-` section prefix in the manifest.
        self.kind = "dev".to_string();
        self
    }
+
    /// Changes this to `[target.$target.dependencies]`.
    pub fn target(&mut self, target: &str) -> &mut Self {
        self.target = Some(target.to_string());
        self
    }
+
    /// Change the artifact to be of the given kind, like "bin", or "staticlib",
    /// along with a specific target triple if provided.
    ///
    /// Stored as a `(kind, target)` pair.
    pub fn artifact(&mut self, kind: &str, target: Option<String>) -> &mut Self {
        self.artifact = Some((kind.to_string(), target));
        self
    }
+
    /// Adds `registry = $registry` to this dependency.
    ///
    /// Note: only "alternative" is accepted by the test manifest generator.
    pub fn registry(&mut self, registry: &str) -> &mut Self {
        self.registry = Some(registry.to_string());
        self
    }
+
    /// Adds `features = [ ... ]` to this dependency.
    ///
    /// Appends to any features already enabled rather than replacing them.
    pub fn enable_features(&mut self, features: &[&str]) -> &mut Self {
        self.features.extend(features.iter().map(|s| s.to_string()));
        self
    }
+
    /// Adds `package = ...` to this dependency (a renamed dependency).
    pub fn package(&mut self, pkg: &str) -> &mut Self {
        self.package = Some(pkg.to_string());
        self
    }
+
    /// Changes this to an optional dependency.
    ///
    /// The flag is emitted into the index entry for the dependency.
    pub fn optional(&mut self, optional: bool) -> &mut Self {
        self.optional = optional;
        self
    }
+}
diff --git a/crates/cargo-test-support/src/tools.rs b/crates/cargo-test-support/src/tools.rs
new file mode 100644
index 0000000..7c056b6
--- /dev/null
+++ b/crates/cargo-test-support/src/tools.rs
@@ -0,0 +1,108 @@
+//! Common executables that can be reused by various tests.
+
+use crate::{basic_manifest, paths, project, Project};
+use lazy_static::lazy_static;
+use std::path::{Path, PathBuf};
+use std::sync::Mutex;
+
lazy_static! {
    // Cached paths of the built helper executables, so each helper is only
    // compiled (or resolved) once per test run.
    static ref ECHO_WRAPPER: Mutex<Option<PathBuf>> = Mutex::new(None);
    static ref ECHO: Mutex<Option<PathBuf>> = Mutex::new(None);
}
+
/// Returns the path to an executable that works as a wrapper around rustc.
///
/// The wrapper will echo the command line it was called with to stderr.
///
/// The wrapper is compiled on first use and cached in [`ECHO_WRAPPER`] for
/// the rest of the test run.
pub fn echo_wrapper() -> PathBuf {
    let mut lock = ECHO_WRAPPER.lock().unwrap();
    if let Some(path) = &*lock {
        // Already built earlier in this test run.
        return path.clone();
    }
    let p = project()
        .at(paths::global_root().join("rustc-echo-wrapper"))
        .file("Cargo.toml", &basic_manifest("rustc-echo-wrapper", "1.0.0"))
        .file(
            "src/main.rs",
            r#"
            use std::fs::read_to_string;
            use std::path::PathBuf;
            fn main() {
                // Handle args from `@path` argfile for rustc
                let args = std::env::args()
                    .flat_map(|p| if let Some(p) = p.strip_prefix("@") {
                        read_to_string(p).unwrap().lines().map(String::from).collect()
                    } else {
                        vec![p]
                    })
                    .collect::<Vec<_>>();
                eprintln!("WRAPPER CALLED: {}", args[1..].join(" "));
                let status = std::process::Command::new(&args[1])
                    .args(&args[2..]).status().unwrap();
                std::process::exit(status.code().unwrap_or(1));
            }
            "#,
        )
        .build();
    p.cargo("build").run();
    let path = p.bin("rustc-echo-wrapper");
    *lock = Some(path.clone());
    path
}
+
/// Returns the path to an executable that prints its arguments.
///
/// Do not expect this to be anything fancy.
///
/// Resolves the system `echo` from `PATH` when available; otherwise builds a
/// small replacement binary. Either result is cached in [`ECHO`].
pub fn echo() -> PathBuf {
    let mut lock = ECHO.lock().unwrap();
    if let Some(path) = &*lock {
        // Already resolved or built earlier in this test run.
        return path.clone();
    }
    if let Ok(path) = cargo_util::paths::resolve_executable(Path::new("echo")) {
        *lock = Some(path.clone());
        return path;
    }
    // Often on Windows, `echo` is not available.
    let p = project()
        .at(paths::global_root().join("basic-echo"))
        .file("Cargo.toml", &basic_manifest("basic-echo", "1.0.0"))
        .file(
            "src/main.rs",
            r#"
            fn main() {
                let mut s = String::new();
                let mut it = std::env::args().skip(1).peekable();
                while let Some(n) = it.next() {
                    s.push_str(&n);
                    if it.peek().is_some() {
                        s.push(' ');
                    }
                }
                println!("{}", s);
            }
            "#,
        )
        .build();
    p.cargo("build").run();
    let path = p.bin("basic-echo");
    *lock = Some(path.clone());
    path
}
+
/// Returns a project which builds a cargo-echo simple subcommand
///
/// The built subcommand prints the arguments it receives, joined by spaces.
/// Note: unlike `echo`/`echo_wrapper`, the result is not cached — each call
/// builds a fresh project.
pub fn echo_subcommand() -> Project {
    let p = project()
        .at("cargo-echo")
        .file("Cargo.toml", &basic_manifest("cargo-echo", "0.0.1"))
        .file(
            "src/main.rs",
            r#"
            fn main() {
                let args: Vec<_> = ::std::env::args().skip(1).collect();
                println!("{}", args.join(" "));
            }
            "#,
        )
        .build();
    p.cargo("build").run();
    p
}