summaryrefslogtreecommitdiffstats
path: root/crates
diff options
context:
space:
mode:
Diffstat (limited to '')
-rw-r--r--crates/cargo-platform/Cargo.toml12
l---------crates/cargo-platform/LICENSE-APACHE1
l---------crates/cargo-platform/LICENSE-MIT1
-rw-r--r--crates/cargo-platform/examples/matches.rs55
-rw-r--r--crates/cargo-platform/src/cfg.rs319
-rw-r--r--crates/cargo-platform/src/error.rs67
-rw-r--r--crates/cargo-platform/src/lib.rs146
-rw-r--r--crates/cargo-platform/tests/test_cfg.rs251
-rw-r--r--crates/cargo-test-macro/Cargo.toml12
-rw-r--r--crates/cargo-test-macro/src/lib.rs245
-rw-r--r--crates/cargo-test-support/Cargo.toml32
-rw-r--r--crates/cargo-test-support/build.rs7
-rw-r--r--crates/cargo-test-support/containers/apache/Dockerfile26
-rw-r--r--crates/cargo-test-support/containers/apache/bar/Cargo.toml4
-rw-r--r--crates/cargo-test-support/containers/apache/bar/src/lib.rs1
-rw-r--r--crates/cargo-test-support/containers/apache/httpd-cargo.conf12
-rw-r--r--crates/cargo-test-support/containers/sshd/Dockerfile29
-rw-r--r--crates/cargo-test-support/containers/sshd/bar/Cargo.toml4
-rw-r--r--crates/cargo-test-support/containers/sshd/bar/src/lib.rs1
-rw-r--r--crates/cargo-test-support/src/compare.rs781
-rw-r--r--crates/cargo-test-support/src/containers.rs285
-rw-r--r--crates/cargo-test-support/src/cross_compile.rs264
-rw-r--r--crates/cargo-test-support/src/diff.rs174
-rw-r--r--crates/cargo-test-support/src/git.rs249
-rw-r--r--crates/cargo-test-support/src/install.rs29
-rw-r--r--crates/cargo-test-support/src/lib.rs1424
-rw-r--r--crates/cargo-test-support/src/paths.rs347
-rw-r--r--crates/cargo-test-support/src/publish.rs245
-rw-r--r--crates/cargo-test-support/src/registry.rs1581
-rw-r--r--crates/cargo-test-support/src/tools.rs108
-rw-r--r--crates/cargo-util/Cargo.toml28
l---------crates/cargo-util/LICENSE-APACHE1
l---------crates/cargo-util/LICENSE-MIT1
-rw-r--r--crates/cargo-util/src/lib.rs18
-rw-r--r--crates/cargo-util/src/paths.rs788
-rw-r--r--crates/cargo-util/src/process_builder.rs689
-rw-r--r--crates/cargo-util/src/process_error.rs200
-rw-r--r--crates/cargo-util/src/read2.rs178
-rw-r--r--crates/cargo-util/src/registry.rs45
-rw-r--r--crates/cargo-util/src/sha256.rs56
-rw-r--r--crates/crates-io/Cargo.toml21
l---------crates/crates-io/LICENSE-APACHE1
l---------crates/crates-io/LICENSE-MIT1
-rw-r--r--crates/crates-io/lib.rs537
-rw-r--r--crates/credential/README.md8
-rw-r--r--crates/credential/cargo-credential-1password/Cargo.toml12
-rw-r--r--crates/credential/cargo-credential-1password/src/main.rs314
-rw-r--r--crates/credential/cargo-credential-gnome-secret/Cargo.toml13
-rw-r--r--crates/credential/cargo-credential-gnome-secret/build.rs3
-rw-r--r--crates/credential/cargo-credential-gnome-secret/src/main.rs194
-rw-r--r--crates/credential/cargo-credential-macos-keychain/Cargo.toml11
-rw-r--r--crates/credential/cargo-credential-macos-keychain/src/main.rs50
-rw-r--r--crates/credential/cargo-credential-wincred/Cargo.toml11
-rw-r--r--crates/credential/cargo-credential-wincred/src/main.rs111
-rw-r--r--crates/credential/cargo-credential/Cargo.toml9
-rw-r--r--crates/credential/cargo-credential/README.md41
-rw-r--r--crates/credential/cargo-credential/src/lib.rs86
-rw-r--r--crates/home/CHANGELOG.md46
-rw-r--r--crates/home/Cargo.toml20
l---------crates/home/LICENSE-APACHE1
l---------crates/home/LICENSE-MIT1
-rw-r--r--crates/home/README.md27
-rw-r--r--crates/home/src/env.rs106
-rw-r--r--crates/home/src/lib.rs149
-rw-r--r--crates/home/src/windows.rs66
-rw-r--r--crates/mdman/Cargo.lock459
-rw-r--r--crates/mdman/Cargo.toml17
-rw-r--r--crates/mdman/README.md7
-rwxr-xr-xcrates/mdman/build-man.sh7
-rw-r--r--crates/mdman/doc/mdman.md95
-rw-r--r--crates/mdman/doc/out/mdman.1124
-rw-r--r--crates/mdman/doc/out/mdman.md95
-rw-r--r--crates/mdman/doc/out/mdman.txt91
-rw-r--r--crates/mdman/src/format.rs20
-rw-r--r--crates/mdman/src/format/man.rs436
-rw-r--r--crates/mdman/src/format/md.rs112
-rw-r--r--crates/mdman/src/format/text.rs605
-rw-r--r--crates/mdman/src/hbs.rs215
-rw-r--r--crates/mdman/src/lib.rs122
-rw-r--r--crates/mdman/src/main.rs133
-rw-r--r--crates/mdman/src/util.rs44
-rw-r--r--crates/mdman/tests/compare.rs48
-rw-r--r--crates/mdman/tests/compare/expected/formatting.1118
-rw-r--r--crates/mdman/tests/compare/expected/formatting.md95
-rw-r--r--crates/mdman/tests/compare/expected/formatting.txt84
-rw-r--r--crates/mdman/tests/compare/expected/links.145
-rw-r--r--crates/mdman/tests/compare/expected/links.md56
-rw-r--r--crates/mdman/tests/compare/expected/links.txt40
-rw-r--r--crates/mdman/tests/compare/expected/options.194
-rw-r--r--crates/mdman/tests/compare/expected/options.md77
-rw-r--r--crates/mdman/tests/compare/expected/options.txt57
-rw-r--r--crates/mdman/tests/compare/expected/tables.1108
-rw-r--r--crates/mdman/tests/compare/expected/tables.md35
-rw-r--r--crates/mdman/tests/compare/expected/tables.txt45
-rw-r--r--crates/mdman/tests/compare/expected/vars.79
-rw-r--r--crates/mdman/tests/compare/expected/vars.md7
-rw-r--r--crates/mdman/tests/compare/expected/vars.txt6
-rw-r--r--crates/mdman/tests/compare/formatting.md95
-rw-r--r--crates/mdman/tests/compare/includes/links-include.md7
-rw-r--r--crates/mdman/tests/compare/includes/options-common.md14
-rw-r--r--crates/mdman/tests/compare/links.md49
-rw-r--r--crates/mdman/tests/compare/options.md62
-rw-r--r--crates/mdman/tests/compare/tables.md35
-rw-r--r--crates/mdman/tests/compare/vars.md7
-rw-r--r--crates/mdman/tests/invalid.rs34
-rw-r--r--crates/mdman/tests/invalid/nested.md6
-rw-r--r--crates/mdman/tests/invalid/not-inside-options.md5
-rw-r--r--crates/resolver-tests/Cargo.toml12
-rw-r--r--crates/resolver-tests/src/lib.rs991
-rw-r--r--crates/resolver-tests/tests/resolve.rs1504
110 files changed, 16882 insertions, 0 deletions
diff --git a/crates/cargo-platform/Cargo.toml b/crates/cargo-platform/Cargo.toml
new file mode 100644
index 0000000..9a31170
--- /dev/null
+++ b/crates/cargo-platform/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "cargo-platform"
+version = "0.1.2"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+homepage = "https://github.com/rust-lang/cargo"
+repository = "https://github.com/rust-lang/cargo"
+documentation = "https://docs.rs/cargo-platform"
+description = "Cargo's representation of a target platform."
+
+[dependencies]
+serde = { version = "1.0.82", features = ['derive'] }
diff --git a/crates/cargo-platform/LICENSE-APACHE b/crates/cargo-platform/LICENSE-APACHE
new file mode 120000
index 0000000..1cd601d
--- /dev/null
+++ b/crates/cargo-platform/LICENSE-APACHE
@@ -0,0 +1 @@
+../../LICENSE-APACHE \ No newline at end of file
diff --git a/crates/cargo-platform/LICENSE-MIT b/crates/cargo-platform/LICENSE-MIT
new file mode 120000
index 0000000..b2cfbdc
--- /dev/null
+++ b/crates/cargo-platform/LICENSE-MIT
@@ -0,0 +1 @@
+../../LICENSE-MIT \ No newline at end of file
diff --git a/crates/cargo-platform/examples/matches.rs b/crates/cargo-platform/examples/matches.rs
new file mode 100644
index 0000000..9ad5d10
--- /dev/null
+++ b/crates/cargo-platform/examples/matches.rs
@@ -0,0 +1,55 @@
+//! This example demonstrates how to filter a Platform based on the current
+//! host target.
+
+use cargo_platform::{Cfg, Platform};
+use std::process::Command;
+use std::str::FromStr;
+
+static EXAMPLES: &[&str] = &[
+ "cfg(windows)",
+ "cfg(unix)",
+ "cfg(target_os=\"macos\")",
+ "cfg(target_os=\"linux\")",
+ "cfg(any(target_arch=\"x86\", target_arch=\"x86_64\"))",
+];
+
+fn main() {
+ let target = get_target();
+ let cfgs = get_cfgs();
+ println!("host target={} cfgs:", target);
+ for cfg in &cfgs {
+ println!(" {}", cfg);
+ }
+ let mut examples: Vec<&str> = EXAMPLES.iter().copied().collect();
+ examples.push(target.as_str());
+ for example in examples {
+ let p = Platform::from_str(example).unwrap();
+ println!("{:?} matches: {:?}", example, p.matches(&target, &cfgs));
+ }
+}
+
+fn get_target() -> String {
+ let output = Command::new("rustc")
+ .arg("-Vv")
+ .output()
+ .expect("rustc failed to run");
+ let stdout = String::from_utf8(output.stdout).unwrap();
+ for line in stdout.lines() {
+ if line.starts_with("host: ") {
+ return String::from(&line[6..]);
+ }
+ }
+ panic!("Failed to find host: {}", stdout);
+}
+
+fn get_cfgs() -> Vec<Cfg> {
+ let output = Command::new("rustc")
+ .arg("--print=cfg")
+ .output()
+ .expect("rustc failed to run");
+ let stdout = String::from_utf8(output.stdout).unwrap();
+ stdout
+ .lines()
+ .map(|line| Cfg::from_str(line).unwrap())
+ .collect()
+}
diff --git a/crates/cargo-platform/src/cfg.rs b/crates/cargo-platform/src/cfg.rs
new file mode 100644
index 0000000..c3ddb69
--- /dev/null
+++ b/crates/cargo-platform/src/cfg.rs
@@ -0,0 +1,319 @@
+use crate::error::{ParseError, ParseErrorKind::*};
+use std::fmt;
+use std::iter;
+use std::str::{self, FromStr};
+
+/// A cfg expression.
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub enum CfgExpr {
+ Not(Box<CfgExpr>),
+ All(Vec<CfgExpr>),
+ Any(Vec<CfgExpr>),
+ Value(Cfg),
+}
+
+/// A cfg value.
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub enum Cfg {
+ /// A named cfg value, like `unix`.
+ Name(String),
+ /// A key/value cfg pair, like `target_os = "linux"`.
+ KeyPair(String, String),
+}
+
+#[derive(PartialEq)]
+enum Token<'a> {
+ LeftParen,
+ RightParen,
+ Ident(&'a str),
+ Comma,
+ Equals,
+ String(&'a str),
+}
+
+#[derive(Clone)]
+struct Tokenizer<'a> {
+ s: iter::Peekable<str::CharIndices<'a>>,
+ orig: &'a str,
+}
+
+struct Parser<'a> {
+ t: Tokenizer<'a>,
+}
+
+impl FromStr for Cfg {
+ type Err = ParseError;
+
+ fn from_str(s: &str) -> Result<Cfg, Self::Err> {
+ let mut p = Parser::new(s);
+ let e = p.cfg()?;
+ if let Some(rest) = p.rest() {
+ return Err(ParseError::new(
+ p.t.orig,
+ UnterminatedExpression(rest.to_string()),
+ ));
+ }
+ Ok(e)
+ }
+}
+
+impl fmt::Display for Cfg {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Cfg::Name(ref s) => s.fmt(f),
+ Cfg::KeyPair(ref k, ref v) => write!(f, "{} = \"{}\"", k, v),
+ }
+ }
+}
+
+impl CfgExpr {
+ /// Utility function to check if the key, "cfg(..)" matches the `target_cfg`
+ pub fn matches_key(key: &str, target_cfg: &[Cfg]) -> bool {
+ if key.starts_with("cfg(") && key.ends_with(')') {
+ let cfg = &key[4..key.len() - 1];
+
+ CfgExpr::from_str(cfg)
+ .ok()
+ .map(|ce| ce.matches(target_cfg))
+ .unwrap_or(false)
+ } else {
+ false
+ }
+ }
+
+ pub fn matches(&self, cfg: &[Cfg]) -> bool {
+ match *self {
+ CfgExpr::Not(ref e) => !e.matches(cfg),
+ CfgExpr::All(ref e) => e.iter().all(|e| e.matches(cfg)),
+ CfgExpr::Any(ref e) => e.iter().any(|e| e.matches(cfg)),
+ CfgExpr::Value(ref e) => cfg.contains(e),
+ }
+ }
+}
+
+impl FromStr for CfgExpr {
+ type Err = ParseError;
+
+ fn from_str(s: &str) -> Result<CfgExpr, Self::Err> {
+ let mut p = Parser::new(s);
+ let e = p.expr()?;
+ if let Some(rest) = p.rest() {
+ return Err(ParseError::new(
+ p.t.orig,
+ UnterminatedExpression(rest.to_string()),
+ ));
+ }
+ Ok(e)
+ }
+}
+
+impl fmt::Display for CfgExpr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ CfgExpr::Not(ref e) => write!(f, "not({})", e),
+ CfgExpr::All(ref e) => write!(f, "all({})", CommaSep(e)),
+ CfgExpr::Any(ref e) => write!(f, "any({})", CommaSep(e)),
+ CfgExpr::Value(ref e) => write!(f, "{}", e),
+ }
+ }
+}
+
+struct CommaSep<'a, T>(&'a [T]);
+
+impl<'a, T: fmt::Display> fmt::Display for CommaSep<'a, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ for (i, v) in self.0.iter().enumerate() {
+ if i > 0 {
+ write!(f, ", ")?;
+ }
+ write!(f, "{}", v)?;
+ }
+ Ok(())
+ }
+}
+
+impl<'a> Parser<'a> {
+ fn new(s: &'a str) -> Parser<'a> {
+ Parser {
+ t: Tokenizer {
+ s: s.char_indices().peekable(),
+ orig: s,
+ },
+ }
+ }
+
+ fn expr(&mut self) -> Result<CfgExpr, ParseError> {
+ match self.peek() {
+ Some(Ok(Token::Ident(op @ "all"))) | Some(Ok(Token::Ident(op @ "any"))) => {
+ self.t.next();
+ let mut e = Vec::new();
+ self.eat(&Token::LeftParen)?;
+ while !self.r#try(&Token::RightParen) {
+ e.push(self.expr()?);
+ if !self.r#try(&Token::Comma) {
+ self.eat(&Token::RightParen)?;
+ break;
+ }
+ }
+ if op == "all" {
+ Ok(CfgExpr::All(e))
+ } else {
+ Ok(CfgExpr::Any(e))
+ }
+ }
+ Some(Ok(Token::Ident("not"))) => {
+ self.t.next();
+ self.eat(&Token::LeftParen)?;
+ let e = self.expr()?;
+ self.eat(&Token::RightParen)?;
+ Ok(CfgExpr::Not(Box::new(e)))
+ }
+ Some(Ok(..)) => self.cfg().map(CfgExpr::Value),
+ Some(Err(..)) => Err(self.t.next().unwrap().err().unwrap()),
+ None => Err(ParseError::new(
+ self.t.orig,
+ IncompleteExpr("start of a cfg expression"),
+ )),
+ }
+ }
+
+ fn cfg(&mut self) -> Result<Cfg, ParseError> {
+ match self.t.next() {
+ Some(Ok(Token::Ident(name))) => {
+ let e = if self.r#try(&Token::Equals) {
+ let val = match self.t.next() {
+ Some(Ok(Token::String(s))) => s,
+ Some(Ok(t)) => {
+ return Err(ParseError::new(
+ self.t.orig,
+ UnexpectedToken {
+ expected: "a string",
+ found: t.classify(),
+ },
+ ))
+ }
+ Some(Err(e)) => return Err(e),
+ None => {
+ return Err(ParseError::new(self.t.orig, IncompleteExpr("a string")))
+ }
+ };
+ Cfg::KeyPair(name.to_string(), val.to_string())
+ } else {
+ Cfg::Name(name.to_string())
+ };
+ Ok(e)
+ }
+ Some(Ok(t)) => Err(ParseError::new(
+ self.t.orig,
+ UnexpectedToken {
+ expected: "identifier",
+ found: t.classify(),
+ },
+ )),
+ Some(Err(e)) => Err(e),
+ None => Err(ParseError::new(self.t.orig, IncompleteExpr("identifier"))),
+ }
+ }
+
+ fn peek(&mut self) -> Option<Result<Token<'a>, ParseError>> {
+ self.t.clone().next()
+ }
+
+ fn r#try(&mut self, token: &Token<'a>) -> bool {
+ match self.peek() {
+ Some(Ok(ref t)) if token == t => {}
+ _ => return false,
+ }
+ self.t.next();
+ true
+ }
+
+ fn eat(&mut self, token: &Token<'a>) -> Result<(), ParseError> {
+ match self.t.next() {
+ Some(Ok(ref t)) if token == t => Ok(()),
+ Some(Ok(t)) => Err(ParseError::new(
+ self.t.orig,
+ UnexpectedToken {
+ expected: token.classify(),
+ found: t.classify(),
+ },
+ )),
+ Some(Err(e)) => Err(e),
+ None => Err(ParseError::new(
+ self.t.orig,
+ IncompleteExpr(token.classify()),
+ )),
+ }
+ }
+
+ /// Returns the rest of the input from the current location.
+ fn rest(&self) -> Option<&str> {
+ let mut s = self.t.s.clone();
+ loop {
+ match s.next() {
+ Some((_, ' ')) => {}
+ Some((start, _ch)) => return Some(&self.t.orig[start..]),
+ None => return None,
+ }
+ }
+ }
+}
+
+impl<'a> Iterator for Tokenizer<'a> {
+ type Item = Result<Token<'a>, ParseError>;
+
+ fn next(&mut self) -> Option<Result<Token<'a>, ParseError>> {
+ loop {
+ match self.s.next() {
+ Some((_, ' ')) => {}
+ Some((_, '(')) => return Some(Ok(Token::LeftParen)),
+ Some((_, ')')) => return Some(Ok(Token::RightParen)),
+ Some((_, ',')) => return Some(Ok(Token::Comma)),
+ Some((_, '=')) => return Some(Ok(Token::Equals)),
+ Some((start, '"')) => {
+ while let Some((end, ch)) = self.s.next() {
+ if ch == '"' {
+ return Some(Ok(Token::String(&self.orig[start + 1..end])));
+ }
+ }
+ return Some(Err(ParseError::new(self.orig, UnterminatedString)));
+ }
+ Some((start, ch)) if is_ident_start(ch) => {
+ while let Some(&(end, ch)) = self.s.peek() {
+ if !is_ident_rest(ch) {
+ return Some(Ok(Token::Ident(&self.orig[start..end])));
+ } else {
+ self.s.next();
+ }
+ }
+ return Some(Ok(Token::Ident(&self.orig[start..])));
+ }
+ Some((_, ch)) => {
+ return Some(Err(ParseError::new(self.orig, UnexpectedChar(ch))));
+ }
+ None => return None,
+ }
+ }
+ }
+}
+
+fn is_ident_start(ch: char) -> bool {
+ ch == '_' || ch.is_ascii_alphabetic()
+}
+
+fn is_ident_rest(ch: char) -> bool {
+ is_ident_start(ch) || ch.is_ascii_digit()
+}
+
+impl<'a> Token<'a> {
+ fn classify(&self) -> &'static str {
+ match *self {
+ Token::LeftParen => "`(`",
+ Token::RightParen => "`)`",
+ Token::Ident(..) => "an identifier",
+ Token::Comma => "`,`",
+ Token::Equals => "`=`",
+ Token::String(..) => "a string",
+ }
+ }
+}
diff --git a/crates/cargo-platform/src/error.rs b/crates/cargo-platform/src/error.rs
new file mode 100644
index 0000000..bf4b35f
--- /dev/null
+++ b/crates/cargo-platform/src/error.rs
@@ -0,0 +1,67 @@
+use std::fmt;
+
+#[derive(Debug)]
+pub struct ParseError {
+ kind: ParseErrorKind,
+ orig: String,
+}
+
+#[non_exhaustive]
+#[derive(Debug)]
+pub enum ParseErrorKind {
+ UnterminatedString,
+ UnexpectedChar(char),
+ UnexpectedToken {
+ expected: &'static str,
+ found: &'static str,
+ },
+ IncompleteExpr(&'static str),
+ UnterminatedExpression(String),
+ InvalidTarget(String),
+}
+
+impl fmt::Display for ParseError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(
+ f,
+ "failed to parse `{}` as a cfg expression: {}",
+ self.orig, self.kind
+ )
+ }
+}
+
+impl fmt::Display for ParseErrorKind {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ use ParseErrorKind::*;
+ match self {
+ UnterminatedString => write!(f, "unterminated string in cfg"),
+ UnexpectedChar(ch) => write!(
+ f,
+ "unexpected character `{}` in cfg, expected parens, a comma, \
+ an identifier, or a string",
+ ch
+ ),
+ UnexpectedToken { expected, found } => {
+ write!(f, "expected {}, found {}", expected, found)
+ }
+ IncompleteExpr(expected) => {
+ write!(f, "expected {}, but cfg expression ended", expected)
+ }
+ UnterminatedExpression(s) => {
+ write!(f, "unexpected content `{}` found after cfg expression", s)
+ }
+ InvalidTarget(s) => write!(f, "invalid target specifier: {}", s),
+ }
+ }
+}
+
+impl std::error::Error for ParseError {}
+
+impl ParseError {
+ pub fn new(orig: &str, kind: ParseErrorKind) -> ParseError {
+ ParseError {
+ kind,
+ orig: orig.to_string(),
+ }
+ }
+}
diff --git a/crates/cargo-platform/src/lib.rs b/crates/cargo-platform/src/lib.rs
new file mode 100644
index 0000000..0a3dcf1
--- /dev/null
+++ b/crates/cargo-platform/src/lib.rs
@@ -0,0 +1,146 @@
+//! Platform definition used by Cargo.
+//!
+//! This defines a [`Platform`] type which is used in Cargo to specify a target platform.
+//! There are two kinds, a named target like `x86_64-apple-darwin`, and a "cfg expression"
+//! like `cfg(any(target_os = "macos", target_os = "ios"))`.
+//!
+//! See `examples/matches.rs` for an example of how to match against a `Platform`.
+//!
+//! [`Platform`]: enum.Platform.html
+
+use std::fmt;
+use std::str::FromStr;
+
+mod cfg;
+mod error;
+
+pub use cfg::{Cfg, CfgExpr};
+pub use error::{ParseError, ParseErrorKind};
+
+/// Platform definition.
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub enum Platform {
+ /// A named platform, like `x86_64-apple-darwin`.
+ Name(String),
+ /// A cfg expression, like `cfg(windows)`.
+ Cfg(CfgExpr),
+}
+
+impl Platform {
+ /// Returns whether the Platform matches the given target and cfg.
+ ///
+ /// The named target and cfg values should be obtained from `rustc`.
+ pub fn matches(&self, name: &str, cfg: &[Cfg]) -> bool {
+ match *self {
+ Platform::Name(ref p) => p == name,
+ Platform::Cfg(ref p) => p.matches(cfg),
+ }
+ }
+
+ fn validate_named_platform(name: &str) -> Result<(), ParseError> {
+ if let Some(ch) = name
+ .chars()
+ .find(|&c| !(c.is_alphanumeric() || c == '_' || c == '-' || c == '.'))
+ {
+ if name.chars().any(|c| c == '(') {
+ return Err(ParseError::new(
+ name,
+ ParseErrorKind::InvalidTarget(
+ "unexpected `(` character, cfg expressions must start with `cfg(`"
+ .to_string(),
+ ),
+ ));
+ }
+ return Err(ParseError::new(
+ name,
+ ParseErrorKind::InvalidTarget(format!(
+ "unexpected character {} in target name",
+ ch
+ )),
+ ));
+ }
+ Ok(())
+ }
+
+ pub fn check_cfg_attributes(&self, warnings: &mut Vec<String>) {
+ fn check_cfg_expr(expr: &CfgExpr, warnings: &mut Vec<String>) {
+ match *expr {
+ CfgExpr::Not(ref e) => check_cfg_expr(e, warnings),
+ CfgExpr::All(ref e) | CfgExpr::Any(ref e) => {
+ for e in e {
+ check_cfg_expr(e, warnings);
+ }
+ }
+ CfgExpr::Value(ref e) => match e {
+ Cfg::Name(name) => match name.as_str() {
+ "test" | "debug_assertions" | "proc_macro" =>
+ warnings.push(format!(
+ "Found `{}` in `target.'cfg(...)'.dependencies`. \
+ This value is not supported for selecting dependencies \
+ and will not work as expected. \
+ To learn more visit \
+ https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#platform-specific-dependencies",
+ name
+ )),
+ _ => (),
+ },
+ Cfg::KeyPair(name, _) => if name.as_str() == "feature" {
+ warnings.push(String::from(
+ "Found `feature = ...` in `target.'cfg(...)'.dependencies`. \
+ This key is not supported for selecting dependencies \
+ and will not work as expected. \
+ Use the [features] section instead: \
+ https://doc.rust-lang.org/cargo/reference/features.html"
+ ))
+ },
+ }
+ }
+ }
+
+ if let Platform::Cfg(cfg) = self {
+ check_cfg_expr(cfg, warnings);
+ }
+ }
+}
+
+impl serde::Serialize for Platform {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ self.to_string().serialize(s)
+ }
+}
+
+impl<'de> serde::Deserialize<'de> for Platform {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ let s = String::deserialize(deserializer)?;
+ FromStr::from_str(&s).map_err(serde::de::Error::custom)
+ }
+}
+
+impl FromStr for Platform {
+ type Err = ParseError;
+
+ fn from_str(s: &str) -> Result<Platform, ParseError> {
+ if s.starts_with("cfg(") && s.ends_with(')') {
+ let s = &s[4..s.len() - 1];
+ s.parse().map(Platform::Cfg)
+ } else {
+ Platform::validate_named_platform(s)?;
+ Ok(Platform::Name(s.to_string()))
+ }
+ }
+}
+
+impl fmt::Display for Platform {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Platform::Name(ref n) => n.fmt(f),
+ Platform::Cfg(ref e) => write!(f, "cfg({})", e),
+ }
+ }
+}
diff --git a/crates/cargo-platform/tests/test_cfg.rs b/crates/cargo-platform/tests/test_cfg.rs
new file mode 100644
index 0000000..dd99d9a
--- /dev/null
+++ b/crates/cargo-platform/tests/test_cfg.rs
@@ -0,0 +1,251 @@
+use cargo_platform::{Cfg, CfgExpr, Platform};
+use std::fmt;
+use std::str::FromStr;
+
+macro_rules! c {
+ ($a:ident) => {
+ Cfg::Name(stringify!($a).to_string())
+ };
+ ($a:ident = $e:expr) => {
+ Cfg::KeyPair(stringify!($a).to_string(), $e.to_string())
+ };
+}
+
+macro_rules! e {
+ (any($($t:tt),*)) => (CfgExpr::Any(vec![$(e!($t)),*]));
+ (all($($t:tt),*)) => (CfgExpr::All(vec![$(e!($t)),*]));
+ (not($($t:tt)*)) => (CfgExpr::Not(Box::new(e!($($t)*))));
+ (($($t:tt)*)) => (e!($($t)*));
+ ($($t:tt)*) => (CfgExpr::Value(c!($($t)*)));
+}
+
+fn good<T>(s: &str, expected: T)
+where
+ T: FromStr + PartialEq + fmt::Debug,
+ T::Err: fmt::Display,
+{
+ let c = match T::from_str(s) {
+ Ok(c) => c,
+ Err(e) => panic!("failed to parse `{}`: {}", s, e),
+ };
+ assert_eq!(c, expected);
+}
+
+fn bad<T>(s: &str, err: &str)
+where
+ T: FromStr + fmt::Display,
+ T::Err: fmt::Display,
+{
+ let e = match T::from_str(s) {
+ Ok(cfg) => panic!("expected `{}` to not parse but got {}", s, cfg),
+ Err(e) => e.to_string(),
+ };
+ assert!(
+ e.contains(err),
+ "when parsing `{}`,\n\"{}\" not contained \
+ inside: {}",
+ s,
+ err,
+ e
+ );
+}
+
+#[test]
+fn cfg_syntax() {
+ good("foo", c!(foo));
+ good("_bar", c!(_bar));
+ good(" foo", c!(foo));
+ good(" foo ", c!(foo));
+ good(" foo = \"bar\"", c!(foo = "bar"));
+ good("foo=\"\"", c!(foo = ""));
+ good(" foo=\"3\" ", c!(foo = "3"));
+ good("foo = \"3 e\"", c!(foo = "3 e"));
+}
+
+#[test]
+fn cfg_syntax_bad() {
+ bad::<Cfg>("", "but cfg expression ended");
+ bad::<Cfg>(" ", "but cfg expression ended");
+ bad::<Cfg>("\t", "unexpected character");
+ bad::<Cfg>("7", "unexpected character");
+ bad::<Cfg>("=", "expected identifier");
+ bad::<Cfg>(",", "expected identifier");
+ bad::<Cfg>("(", "expected identifier");
+ bad::<Cfg>("foo (", "unexpected content `(` found after cfg expression");
+ bad::<Cfg>("bar =", "expected a string");
+ bad::<Cfg>("bar = \"", "unterminated string");
+ bad::<Cfg>(
+ "foo, bar",
+ "unexpected content `, bar` found after cfg expression",
+ );
+}
+
+#[test]
+fn cfg_expr() {
+ good("foo", e!(foo));
+ good("_bar", e!(_bar));
+ good(" foo", e!(foo));
+ good(" foo ", e!(foo));
+ good(" foo = \"bar\"", e!(foo = "bar"));
+ good("foo=\"\"", e!(foo = ""));
+ good(" foo=\"3\" ", e!(foo = "3"));
+ good("foo = \"3 e\"", e!(foo = "3 e"));
+
+ good("all()", e!(all()));
+ good("all(a)", e!(all(a)));
+ good("all(a, b)", e!(all(a, b)));
+ good("all(a, )", e!(all(a)));
+ good("not(a = \"b\")", e!(not(a = "b")));
+ good("not(all(a))", e!(not(all(a))));
+}
+
+#[test]
+fn cfg_expr_bad() {
+ bad::<CfgExpr>(" ", "but cfg expression ended");
+ bad::<CfgExpr>(" all", "expected `(`");
+ bad::<CfgExpr>("all(a", "expected `)`");
+ bad::<CfgExpr>("not", "expected `(`");
+ bad::<CfgExpr>("not(a", "expected `)`");
+ bad::<CfgExpr>("a = ", "expected a string");
+ bad::<CfgExpr>("all(not())", "expected identifier");
+ bad::<CfgExpr>(
+ "foo(a)",
+ "unexpected content `(a)` found after cfg expression",
+ );
+}
+
+#[test]
+fn cfg_matches() {
+ assert!(e!(foo).matches(&[c!(bar), c!(foo), c!(baz)]));
+ assert!(e!(any(foo)).matches(&[c!(bar), c!(foo), c!(baz)]));
+ assert!(e!(any(foo, bar)).matches(&[c!(bar)]));
+ assert!(e!(any(foo, bar)).matches(&[c!(foo)]));
+ assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));
+ assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));
+ assert!(e!(not(foo)).matches(&[c!(bar)]));
+ assert!(e!(not(foo)).matches(&[]));
+ assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(bar)]));
+ assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo), c!(bar)]));
+
+ assert!(!e!(foo).matches(&[]));
+ assert!(!e!(foo).matches(&[c!(bar)]));
+ assert!(!e!(foo).matches(&[c!(fo)]));
+ assert!(!e!(any(foo)).matches(&[]));
+ assert!(!e!(any(foo)).matches(&[c!(bar)]));
+ assert!(!e!(any(foo)).matches(&[c!(bar), c!(baz)]));
+ assert!(!e!(all(foo)).matches(&[c!(bar), c!(baz)]));
+ assert!(!e!(all(foo, bar)).matches(&[c!(bar)]));
+ assert!(!e!(all(foo, bar)).matches(&[c!(foo)]));
+ assert!(!e!(all(foo, bar)).matches(&[]));
+ assert!(!e!(not(bar)).matches(&[c!(bar)]));
+ assert!(!e!(not(bar)).matches(&[c!(baz), c!(bar)]));
+ assert!(!e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo)]));
+}
+
+#[test]
+fn bad_target_name() {
+ bad::<Platform>(
+ "any(cfg(unix), cfg(windows))",
+ "failed to parse `any(cfg(unix), cfg(windows))` as a cfg expression: \
+ invalid target specifier: unexpected `(` character, \
+ cfg expressions must start with `cfg(`",
+ );
+ bad::<Platform>(
+ "!foo",
+ "failed to parse `!foo` as a cfg expression: \
+ invalid target specifier: unexpected character ! in target name",
+ );
+}
+
+#[test]
+fn round_trip_platform() {
+ fn rt(s: &str) {
+ let p = Platform::from_str(s).unwrap();
+ let s2 = p.to_string();
+ let p2 = Platform::from_str(&s2).unwrap();
+ assert_eq!(p, p2);
+ }
+ rt("x86_64-apple-darwin");
+ rt("foo");
+ rt("cfg(windows)");
+ rt("cfg(target_os = \"windows\")");
+ rt(
+ "cfg(any(all(any(target_os = \"android\", target_os = \"linux\"), \
+ any(target_arch = \"aarch64\", target_arch = \"arm\", target_arch = \"powerpc64\", \
+ target_arch = \"x86\", target_arch = \"x86_64\")), \
+ all(target_os = \"freebsd\", target_arch = \"x86_64\")))",
+ );
+}
+
+#[test]
+fn check_cfg_attributes() {
+ fn ok(s: &str) {
+ let p = Platform::Cfg(s.parse().unwrap());
+ let mut warnings = Vec::new();
+ p.check_cfg_attributes(&mut warnings);
+ assert!(
+ warnings.is_empty(),
+ "Expected no warnings but got: {:?}",
+ warnings,
+ );
+ }
+
+ fn warn(s: &str, names: &[&str]) {
+ let p = Platform::Cfg(s.parse().unwrap());
+ let mut warnings = Vec::new();
+ p.check_cfg_attributes(&mut warnings);
+ assert_eq!(
+ warnings.len(),
+ names.len(),
+ "Expecter warnings about {:?} but got {:?}",
+ names,
+ warnings,
+ );
+ for (name, warning) in names.iter().zip(warnings.iter()) {
+ assert!(
+ warning.contains(name),
+ "Expected warning about '{}' but got: {}",
+ name,
+ warning,
+ );
+ }
+ }
+
+ ok("unix");
+ ok("windows");
+ ok("any(not(unix), windows)");
+ ok("foo");
+
+ ok("target_arch = \"abc\"");
+ ok("target_feature = \"abc\"");
+ ok("target_os = \"abc\"");
+ ok("target_family = \"abc\"");
+ ok("target_env = \"abc\"");
+ ok("target_endian = \"abc\"");
+ ok("target_pointer_width = \"abc\"");
+ ok("target_vendor = \"abc\"");
+ ok("bar = \"def\"");
+
+ warn("test", &["test"]);
+ warn("debug_assertions", &["debug_assertions"]);
+ warn("proc_macro", &["proc_macro"]);
+ warn("feature = \"abc\"", &["feature"]);
+
+ warn("any(not(debug_assertions), windows)", &["debug_assertions"]);
+ warn(
+ "any(not(feature = \"def\"), target_arch = \"abc\")",
+ &["feature"],
+ );
+ warn(
+ "any(not(target_os = \"windows\"), proc_macro)",
+ &["proc_macro"],
+ );
+ warn(
+ "any(not(feature = \"windows\"), proc_macro)",
+ &["feature", "proc_macro"],
+ );
+ warn(
+ "all(not(debug_assertions), any(windows, proc_macro))",
+ &["debug_assertions", "proc_macro"],
+ );
+}
diff --git a/crates/cargo-test-macro/Cargo.toml b/crates/cargo-test-macro/Cargo.toml
new file mode 100644
index 0000000..04dafc0
--- /dev/null
+++ b/crates/cargo-test-macro/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "cargo-test-macro"
+version = "0.1.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+homepage = "https://github.com/rust-lang/cargo"
+repository = "https://github.com/rust-lang/cargo"
+documentation = "https://github.com/rust-lang/cargo"
+description = "Helper proc-macro for Cargo's testsuite."
+
+[lib]
+proc-macro = true
diff --git a/crates/cargo-test-macro/src/lib.rs b/crates/cargo-test-macro/src/lib.rs
new file mode 100644
index 0000000..aa06f47
--- /dev/null
+++ b/crates/cargo-test-macro/src/lib.rs
@@ -0,0 +1,245 @@
+extern crate proc_macro;
+
+use proc_macro::*;
+use std::process::Command;
+use std::sync::Once;
+
+#[proc_macro_attribute]
+pub fn cargo_test(attr: TokenStream, item: TokenStream) -> TokenStream {
+ // Ideally these options would be embedded in the test itself. However, I
+ // find it very helpful to have the test clearly state whether or not it
+ // is ignored. It would be nice to have some kind of runtime ignore
+ // support (such as
+ // https://internals.rust-lang.org/t/pre-rfc-skippable-tests/14611).
+ //
+ // Unfortunately a big drawback here is that if the environment changes
+ // (such as the existence of the `git` CLI), this will not trigger a
+ // rebuild and the test will still be ignored. In theory, something like
+ // `tracked_env` or `tracked_path`
+ // (https://github.com/rust-lang/rust/issues/99515) could help with this,
+ // but they don't really handle the absence of files well.
+ let mut ignore = false;
+ let mut requires_reason = false;
+ let mut explicit_reason = None;
+ let mut implicit_reasons = Vec::new();
+ macro_rules! set_ignore {
+ ($predicate:expr, $($arg:tt)*) => {
+ let p = $predicate;
+ ignore |= p;
+ if p {
+ implicit_reasons.push(std::fmt::format(format_args!($($arg)*)));
+ }
+ };
+ }
+ let is_not_nightly = !version().1;
+ for rule in split_rules(attr) {
+ match rule.as_str() {
+ "build_std_real" => {
+ // Only run the "real" build-std tests on nightly and with an
+ // explicit opt-in (these generally only work on linux, and
+ // have some extra requirements, and are slow, and can pollute
+ // the environment since it downloads dependencies).
+ set_ignore!(is_not_nightly, "requires nightly");
+ set_ignore!(
+ option_env!("CARGO_RUN_BUILD_STD_TESTS").is_none(),
+ "CARGO_RUN_BUILD_STD_TESTS must be set"
+ );
+ }
+ "build_std_mock" => {
+ // Only run the "mock" build-std tests on nightly and disable
+ // for windows-gnu which is missing object files (see
+ // https://github.com/rust-lang/wg-cargo-std-aware/issues/46).
+ set_ignore!(is_not_nightly, "requires nightly");
+ set_ignore!(
+ cfg!(all(target_os = "windows", target_env = "gnu")),
+ "does not work on windows-gnu"
+ );
+ }
+ "container_test" => {
+ // These tests must be opt-in because they require docker.
+ set_ignore!(
+ option_env!("CARGO_CONTAINER_TESTS").is_none(),
+ "CARGO_CONTAINER_TESTS must be set"
+ );
+ }
+ "public_network_test" => {
+ // These tests must be opt-in because they touch the public
+ // network. The use of these should be **EXTREMELY RARE**, and
+ // should only touch things which would nearly certainly work
+ // in CI (like github.com).
+ set_ignore!(
+ option_env!("CARGO_PUBLIC_NETWORK_TESTS").is_none(),
+ "CARGO_PUBLIC_NETWORK_TESTS must be set"
+ );
+ }
+ "nightly" => {
+ requires_reason = true;
+ set_ignore!(is_not_nightly, "requires nightly");
+ }
+ s if s.starts_with("requires_") => {
+ let command = &s[9..];
+ set_ignore!(!has_command(command), "{command} not installed");
+ }
+ s if s.starts_with(">=1.") => {
+ requires_reason = true;
+ let min_minor = s[4..].parse().unwrap();
+ let minor = version().0;
+ set_ignore!(minor < min_minor, "requires rustc 1.{minor} or newer");
+ }
+ s if s.starts_with("reason=") => {
+ explicit_reason = Some(s[7..].parse().unwrap());
+ }
+ s if s.starts_with("ignore_windows=") => {
+ set_ignore!(cfg!(windows), "{}", &s[16..s.len() - 1]);
+ }
+ _ => panic!("unknown rule {:?}", rule),
+ }
+ }
+ if requires_reason && explicit_reason.is_none() {
+ panic!(
+ "#[cargo_test] with a rule also requires a reason, \
+ such as #[cargo_test(nightly, reason = \"needs -Z unstable-thing\")]"
+ );
+ }
+
+ // Construct the appropriate attributes.
+ let span = Span::call_site();
+ let mut ret = TokenStream::new();
+ let add_attr = |ret: &mut TokenStream, attr_name, attr_input| {
+ ret.extend(Some(TokenTree::from(Punct::new('#', Spacing::Alone))));
+ let attr = TokenTree::from(Ident::new(attr_name, span));
+ let mut attr_stream: TokenStream = attr.into();
+ if let Some(input) = attr_input {
+ attr_stream.extend(input);
+ }
+ ret.extend(Some(TokenTree::from(Group::new(
+ Delimiter::Bracket,
+ attr_stream,
+ ))));
+ };
+ add_attr(&mut ret, "test", None);
+ if ignore {
+ let reason = explicit_reason
+ .or_else(|| {
+ (!implicit_reasons.is_empty())
+ .then(|| TokenTree::from(Literal::string(&implicit_reasons.join(", "))).into())
+ })
+ .map(|reason: TokenStream| {
+ let mut stream = TokenStream::new();
+ stream.extend(Some(TokenTree::from(Punct::new('=', Spacing::Alone))));
+ stream.extend(Some(reason));
+ stream
+ });
+ add_attr(&mut ret, "ignore", reason);
+ }
+
+ // Find where the function body starts, and add the boilerplate at the start.
+ for token in item {
+ let group = match token {
+ TokenTree::Group(g) => {
+ if g.delimiter() == Delimiter::Brace {
+ g
+ } else {
+ ret.extend(Some(TokenTree::Group(g)));
+ continue;
+ }
+ }
+ other => {
+ ret.extend(Some(other));
+ continue;
+ }
+ };
+
+ let mut new_body = to_token_stream(
+ r#"let _test_guard = {
+ let tmp_dir = option_env!("CARGO_TARGET_TMPDIR");
+ cargo_test_support::paths::init_root(tmp_dir)
+ };"#,
+ );
+
+ new_body.extend(group.stream());
+ ret.extend(Some(TokenTree::from(Group::new(
+ group.delimiter(),
+ new_body,
+ ))));
+ }
+
+ ret
+}
+
+fn split_rules(t: TokenStream) -> Vec<String> {
+ let tts: Vec<_> = t.into_iter().collect();
+ tts.split(|tt| match tt {
+ TokenTree::Punct(p) => p.as_char() == ',',
+ _ => false,
+ })
+ .filter(|parts| !parts.is_empty())
+ .map(|parts| {
+ parts
+ .into_iter()
+ .map(|part| part.to_string())
+ .collect::<String>()
+ })
+ .collect()
+}
+
+fn to_token_stream(code: &str) -> TokenStream {
+ code.parse().unwrap()
+}
+
+static mut VERSION: (u32, bool) = (0, false);
+
+fn version() -> &'static (u32, bool) {
+ static INIT: Once = Once::new();
+ INIT.call_once(|| {
+ let output = Command::new("rustc")
+ .arg("-V")
+ .output()
+ .expect("rustc should run");
+ let stdout = std::str::from_utf8(&output.stdout).expect("utf8");
+ let vers = stdout.split_whitespace().skip(1).next().unwrap();
+ let is_nightly = option_env!("CARGO_TEST_DISABLE_NIGHTLY").is_none()
+ && (vers.contains("-nightly") || vers.contains("-dev"));
+ let minor = vers.split('.').skip(1).next().unwrap().parse().unwrap();
+ unsafe { VERSION = (minor, is_nightly) }
+ });
+ unsafe { &VERSION }
+}
+
+fn has_command(command: &str) -> bool {
+ let output = match Command::new(command).arg("--version").output() {
+ Ok(output) => output,
+ Err(e) => {
+ // hg is not installed on GitHub macOS or certain constrained
+ // environments like Docker. Consider installing it if Cargo gains
+ // more hg support, but otherwise it isn't critical.
+ if is_ci() && command != "hg" {
+ panic!(
+ "expected command `{}` to be somewhere in PATH: {}",
+ command, e
+ );
+ }
+ return false;
+ }
+ };
+ if !output.status.success() {
+ panic!(
+ "expected command `{}` to be runnable, got error {}:\n\
+ stderr:{}\n\
+ stdout:{}\n",
+ command,
+ output.status,
+ String::from_utf8_lossy(&output.stderr),
+ String::from_utf8_lossy(&output.stdout)
+ );
+ }
+ true
+}
+
+/// Whether or not this is running in a Continuous Integration environment.
+fn is_ci() -> bool {
+ // Consider using `tracked_env` instead of option_env! when it is stabilized.
+ // `tracked_env` will handle changes, but not require rebuilding the macro
+ // itself like option_env does.
+ option_env!("CI").is_some() || option_env!("TF_BUILD").is_some()
+}
diff --git a/crates/cargo-test-support/Cargo.toml b/crates/cargo-test-support/Cargo.toml
new file mode 100644
index 0000000..6c4b251
--- /dev/null
+++ b/crates/cargo-test-support/Cargo.toml
@@ -0,0 +1,32 @@
+[package]
+name = "cargo-test-support"
+version = "0.1.0"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+
+[lib]
+doctest = false
+
+[dependencies]
+anyhow = "1.0.34"
+cargo-test-macro = { path = "../cargo-test-macro" }
+cargo-util = { path = "../cargo-util" }
+crates-io = { path = "../crates-io" }
+filetime = "0.2"
+flate2 = { version = "1.0", default-features = false, features = ["zlib"] }
+git2 = "0.16.0"
+glob = "0.3"
+itertools = "0.10.0"
+lazy_static = "1.0"
+pasetors = { version = "0.6.4", features = ["v3", "paserk", "std", "serde"] }
+serde = { version = "1.0.123", features = ["derive"] }
+serde_json = "1.0"
+snapbox = { version = "0.4.0", features = ["diff", "path"] }
+tar = { version = "0.4.38", default-features = false }
+termcolor = "1.1.2"
+time = { version = "0.3", features = ["parsing", "formatting"]}
+toml = "0.7.0"
+url = "2.2.2"
+
+[target.'cfg(windows)'.dependencies]
+windows-sys = { version = "0.45.0", features = ["Win32_Storage_FileSystem"] }
diff --git a/crates/cargo-test-support/build.rs b/crates/cargo-test-support/build.rs
new file mode 100644
index 0000000..478da7d
--- /dev/null
+++ b/crates/cargo-test-support/build.rs
@@ -0,0 +1,7 @@
+fn main() {
+ println!(
+ "cargo:rustc-env=NATIVE_ARCH={}",
+ std::env::var("TARGET").unwrap()
+ );
+ println!("cargo:rerun-if-changed=build.rs");
+}
diff --git a/crates/cargo-test-support/containers/apache/Dockerfile b/crates/cargo-test-support/containers/apache/Dockerfile
new file mode 100644
index 0000000..8726024
--- /dev/null
+++ b/crates/cargo-test-support/containers/apache/Dockerfile
@@ -0,0 +1,26 @@
+FROM httpd:2.4-alpine
+
+RUN apk add --no-cache git git-daemon openssl
+
+COPY bar /repos/bar
+WORKDIR /repos/bar
+RUN git config --global user.email "testuser@example.com" &&\
+ git config --global user.name "Test User" &&\
+ git init -b master . &&\
+ git add Cargo.toml src &&\
+ git commit -m "Initial commit" &&\
+ mv .git ../bar.git &&\
+ cd ../bar.git &&\
+ git config --bool core.bare true &&\
+ rm -rf ../bar
+WORKDIR /
+
+EXPOSE 443
+
+WORKDIR /usr/local/apache2/conf
+COPY httpd-cargo.conf .
+RUN cat httpd-cargo.conf >> httpd.conf
+RUN openssl req -x509 -nodes -days 3650 -newkey rsa:2048 \
+ -keyout server.key -out server.crt \
+ -subj "/emailAddress=webmaster@example.com/C=US/ST=California/L=San Francisco/O=Rust/OU=Cargo/CN=127.0.0.1"
+WORKDIR /
diff --git a/crates/cargo-test-support/containers/apache/bar/Cargo.toml b/crates/cargo-test-support/containers/apache/bar/Cargo.toml
new file mode 100644
index 0000000..84fd5d8
--- /dev/null
+++ b/crates/cargo-test-support/containers/apache/bar/Cargo.toml
@@ -0,0 +1,4 @@
+[package]
+name = "bar"
+version = "1.0.0"
+edition = "2021"
diff --git a/crates/cargo-test-support/containers/apache/bar/src/lib.rs b/crates/cargo-test-support/containers/apache/bar/src/lib.rs
new file mode 100644
index 0000000..ca74e3a
--- /dev/null
+++ b/crates/cargo-test-support/containers/apache/bar/src/lib.rs
@@ -0,0 +1 @@
+// Intentionally blank.
diff --git a/crates/cargo-test-support/containers/apache/httpd-cargo.conf b/crates/cargo-test-support/containers/apache/httpd-cargo.conf
new file mode 100644
index 0000000..a4ba7d5
--- /dev/null
+++ b/crates/cargo-test-support/containers/apache/httpd-cargo.conf
@@ -0,0 +1,12 @@
+SetEnv GIT_PROJECT_ROOT /repos
+SetEnv GIT_HTTP_EXPORT_ALL
+ScriptAlias /repos /usr/libexec/git-core/git-http-backend/
+LoadModule cgid_module modules/mod_cgid.so
+
+<Files "git-http-backend">
+ Require all granted
+</Files>
+
+Include conf/extra/httpd-ssl.conf
+LoadModule ssl_module modules/mod_ssl.so
+LoadModule socache_shmcb_module modules/mod_socache_shmcb.so
diff --git a/crates/cargo-test-support/containers/sshd/Dockerfile b/crates/cargo-test-support/containers/sshd/Dockerfile
new file mode 100644
index 0000000..b52eefb
--- /dev/null
+++ b/crates/cargo-test-support/containers/sshd/Dockerfile
@@ -0,0 +1,29 @@
+FROM alpine:3.17
+
+RUN apk add --no-cache openssh git
+RUN ssh-keygen -A
+
+RUN addgroup -S testuser && adduser -S testuser -G testuser -s /bin/ash
+# NOTE: Ideally the password should be set to *, but I am uncertain how to do
+# that in alpine. It shouldn't matter since PermitEmptyPasswords is "no".
+RUN passwd -u testuser
+
+RUN mkdir /repos && chown testuser /repos
+COPY --chown=testuser:testuser bar /repos/bar
+USER testuser
+WORKDIR /repos/bar
+RUN git config --global user.email "testuser@example.com" &&\
+ git config --global user.name "Test User" &&\
+ git init -b master . &&\
+ git add Cargo.toml src &&\
+ git commit -m "Initial commit" &&\
+ mv .git ../bar.git &&\
+ cd ../bar.git &&\
+ git config --bool core.bare true &&\
+ rm -rf ../bar
+WORKDIR /
+USER root
+
+EXPOSE 22
+
+ENTRYPOINT ["/usr/sbin/sshd", "-D", "-E", "/var/log/auth.log"]
diff --git a/crates/cargo-test-support/containers/sshd/bar/Cargo.toml b/crates/cargo-test-support/containers/sshd/bar/Cargo.toml
new file mode 100644
index 0000000..84fd5d8
--- /dev/null
+++ b/crates/cargo-test-support/containers/sshd/bar/Cargo.toml
@@ -0,0 +1,4 @@
+[package]
+name = "bar"
+version = "1.0.0"
+edition = "2021"
diff --git a/crates/cargo-test-support/containers/sshd/bar/src/lib.rs b/crates/cargo-test-support/containers/sshd/bar/src/lib.rs
new file mode 100644
index 0000000..ca74e3a
--- /dev/null
+++ b/crates/cargo-test-support/containers/sshd/bar/src/lib.rs
@@ -0,0 +1 @@
+// Intentionally blank.
diff --git a/crates/cargo-test-support/src/compare.rs b/crates/cargo-test-support/src/compare.rs
new file mode 100644
index 0000000..da1d099
--- /dev/null
+++ b/crates/cargo-test-support/src/compare.rs
@@ -0,0 +1,781 @@
+//! Routines for comparing and diffing output.
+//!
+//! # Patterns
+//!
+//! Many of these functions support special markup to assist with comparing
+//! text that may vary or is otherwise uninteresting for the test at hand. The
+//! supported patterns are:
+//!
+//! - `[..]` is a wildcard that matches 0 or more characters on the same line
+//! (similar to `.*` in a regex). It is non-greedy.
+//! - `[EXE]` optionally adds `.exe` on Windows (empty string on other
+//! platforms).
+//! - `[ROOT]` is the path to the test directory's root.
+//! - `[CWD]` is the working directory of the process that was run.
+//! - There is a wide range of substitutions (such as `[COMPILING]` or
+//! `[WARNING]`) to match cargo's "status" output and allows you to ignore
+//! the alignment. See the source of `substitute_macros` for a complete list
+//! of substitutions.
+//! - `[DIRTY-MSVC]` (only when the line starts with it) would be replaced by
+//! `[DIRTY]` when `cfg(target_env = "msvc")` or the line will be ignored otherwise.
+//! Tests that work around [issue 7358](https://github.com/rust-lang/cargo/issues/7358)
+//! can use this to avoid duplicating the `with_stderr` call like:
+//! `if cfg!(target_env = "msvc") {e.with_stderr("...[DIRTY]...");} else {e.with_stderr("...");}`.
+//!
+//! # Normalization
+//!
+//! In addition to the patterns described above, the strings are normalized
+//! in such a way to avoid unwanted differences. The normalizations are:
+//!
+//! - Raw tab characters are converted to the string `<tab>`. This is helpful
+//! so that raw tabs do not need to be written in the expected string, and
+//! to avoid confusion of tabs vs spaces.
+//! - Backslashes are converted to forward slashes to deal with Windows paths.
+//! This helps so that all tests can be written assuming forward slashes.
+//! Other heuristics are applied to try to ensure Windows-style paths aren't
+//! a problem.
+//! - Carriage returns are removed, which can help when running on Windows.
+
+use crate::diff;
+use crate::paths;
+use anyhow::{bail, Context, Result};
+use serde_json::Value;
+use std::env;
+use std::fmt;
+use std::path::Path;
+use std::str;
+use url::Url;
+
+/// Default `snapbox` Assertions
+///
+/// # Snapshots
+///
+/// Updating of snapshots is controlled with the `SNAPSHOTS` environment variable:
+///
+/// - `skip`: do not run the tests
+/// - `ignore`: run the tests but ignore their failure
+/// - `verify`: run the tests
+/// - `overwrite`: update the snapshots based on the output of the tests
+///
+/// # Patterns
+///
+/// - `[..]` is a character wildcard, stopping at line breaks
+/// - `\n...\n` is a multi-line wildcard
+/// - `[EXE]` matches the exe suffix for the current platform
+/// - `[ROOT]` matches [`paths::root()`][crate::paths::root]
+/// - `[ROOTURL]` matches [`paths::root()`][crate::paths::root] as a URL
+///
+/// # Normalization
+///
+/// In addition to the patterns described above, text is normalized
+/// in such a way to avoid unwanted differences. The normalizations are:
+///
+/// - Backslashes are converted to forward slashes to deal with Windows paths.
+/// This helps so that all tests can be written assuming forward slashes.
+/// Other heuristics are applied to try to ensure Windows-style paths aren't
+/// a problem.
+/// - Carriage returns are removed, which can help when running on Windows.
+pub fn assert_ui() -> snapbox::Assert {
+ let root = paths::root();
+ // Use `from_file_path` instead of `from_dir_path` so the trailing slash is
+ // put in the user's output, rather than hidden in the variable
+ let root_url = url::Url::from_file_path(&root).unwrap().to_string();
+ let root = root.display().to_string();
+
+ let mut subs = snapbox::Substitutions::new();
+ subs.extend([
+ (
+ "[EXE]",
+ std::borrow::Cow::Borrowed(std::env::consts::EXE_SUFFIX),
+ ),
+ ("[ROOT]", std::borrow::Cow::Owned(root)),
+ ("[ROOTURL]", std::borrow::Cow::Owned(root_url)),
+ ])
+ .unwrap();
+ snapbox::Assert::new()
+ .action_env(snapbox::DEFAULT_ACTION_ENV)
+ .substitutions(subs)
+}
+
+/// Normalizes the output so that it can be compared against the expected value.
+fn normalize_actual(actual: &str, cwd: Option<&Path>) -> String {
+ // It's easier to read tabs in outputs if they don't show up as literal
+ // hidden characters
+ let actual = actual.replace('\t', "<tab>");
+ if cfg!(windows) {
+ // Let's not deal with \r\n vs \n on windows...
+ let actual = actual.replace('\r', "");
+ normalize_windows(&actual, cwd)
+ } else {
+ actual
+ }
+}
+
+/// Normalizes the expected string so that it can be compared against the actual output.
+fn normalize_expected(expected: &str, cwd: Option<&Path>) -> String {
+ let expected = replace_dirty_msvc(expected);
+ let expected = substitute_macros(&expected);
+
+ if cfg!(windows) {
+ normalize_windows(&expected, cwd)
+ } else {
+ let expected = match cwd {
+ None => expected,
+ Some(cwd) => expected.replace("[CWD]", &cwd.display().to_string()),
+ };
+ let expected = expected.replace("[ROOT]", &paths::root().display().to_string());
+ expected
+ }
+}
+
+fn replace_dirty_msvc_impl(s: &str, is_msvc: bool) -> String {
+ if is_msvc {
+ s.replace("[DIRTY-MSVC]", "[DIRTY]")
+ } else {
+ use itertools::Itertools;
+
+ let mut new = s
+ .lines()
+ .filter(|it| !it.starts_with("[DIRTY-MSVC]"))
+ .join("\n");
+
+ if s.ends_with("\n") {
+ new.push_str("\n");
+ }
+
+ new
+ }
+}
+
+fn replace_dirty_msvc(s: &str) -> String {
+ replace_dirty_msvc_impl(s, cfg!(target_env = "msvc"))
+}
+
+/// Normalizes text for both actual and expected strings on Windows.
+fn normalize_windows(text: &str, cwd: Option<&Path>) -> String {
+ // Let's not deal with / vs \ (windows...)
+ let text = text.replace('\\', "/");
+
+ // Weirdness for paths on Windows extends beyond `/` vs `\` apparently.
+ // Namely paths like `c:\` and `C:\` are equivalent and that can cause
+ // issues. The return value of `env::current_dir()` may return a
+ // lowercase drive name, but we round-trip a lot of values through `Url`
+ // which will auto-uppercase the drive name. To just ignore this
+ // distinction we try to canonicalize as much as possible, taking all
+ // forms of a path and canonicalizing them to one.
+ let replace_path = |s: &str, path: &Path, with: &str| {
+ let path_through_url = Url::from_file_path(path).unwrap().to_file_path().unwrap();
+ let path1 = path.display().to_string().replace('\\', "/");
+ let path2 = path_through_url.display().to_string().replace('\\', "/");
+ s.replace(&path1, with)
+ .replace(&path2, with)
+ .replace(with, &path1)
+ };
+
+ let text = match cwd {
+ None => text,
+ Some(p) => replace_path(&text, p, "[CWD]"),
+ };
+
+ // Similar to cwd above, perform similar treatment to the root path
+ // which in theory all of our paths should otherwise get rooted at.
+ let root = paths::root();
+ let text = replace_path(&text, &root, "[ROOT]");
+
+ text
+}
+
+fn substitute_macros(input: &str) -> String {
+ let macros = [
+ ("[RUNNING]", " Running"),
+ ("[COMPILING]", " Compiling"),
+ ("[CHECKING]", " Checking"),
+ ("[COMPLETED]", " Completed"),
+ ("[CREATED]", " Created"),
+ ("[FINISHED]", " Finished"),
+ ("[ERROR]", "error:"),
+ ("[WARNING]", "warning:"),
+ ("[NOTE]", "note:"),
+ ("[HELP]", "help:"),
+ ("[DOCUMENTING]", " Documenting"),
+ ("[SCRAPING]", " Scraping"),
+ ("[FRESH]", " Fresh"),
+ ("[DIRTY]", " Dirty"),
+ ("[UPDATING]", " Updating"),
+ ("[ADDING]", " Adding"),
+ ("[REMOVING]", " Removing"),
+ ("[DOCTEST]", " Doc-tests"),
+ ("[PACKAGING]", " Packaging"),
+ ("[PACKAGED]", " Packaged"),
+ ("[DOWNLOADING]", " Downloading"),
+ ("[DOWNLOADED]", " Downloaded"),
+ ("[UPLOADING]", " Uploading"),
+ ("[VERIFYING]", " Verifying"),
+ ("[ARCHIVING]", " Archiving"),
+ ("[INSTALLING]", " Installing"),
+ ("[REPLACING]", " Replacing"),
+ ("[UNPACKING]", " Unpacking"),
+ ("[SUMMARY]", " Summary"),
+ ("[FIXED]", " Fixed"),
+ ("[FIXING]", " Fixing"),
+ ("[EXE]", env::consts::EXE_SUFFIX),
+ ("[IGNORED]", " Ignored"),
+ ("[INSTALLED]", " Installed"),
+ ("[REPLACED]", " Replaced"),
+ ("[BUILDING]", " Building"),
+ ("[LOGIN]", " Login"),
+ ("[LOGOUT]", " Logout"),
+ ("[YANK]", " Yank"),
+ ("[OWNER]", " Owner"),
+ ("[MIGRATING]", " Migrating"),
+ ("[EXECUTABLE]", " Executable"),
+ ("[SKIPPING]", " Skipping"),
+ ("[WAITING]", " Waiting"),
+ ];
+ let mut result = input.to_owned();
+ for &(pat, subst) in &macros {
+ result = result.replace(pat, subst)
+ }
+ result
+}
+
+/// Compares one string against another, checking that they both match.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+///
+/// - `description` explains where the output is from (usually "stdout" or "stderr").
+/// - `other_output` is other output to display in the error (usually stdout or stderr).
+pub fn match_exact(
+ expected: &str,
+ actual: &str,
+ description: &str,
+ other_output: &str,
+ cwd: Option<&Path>,
+) -> Result<()> {
+ let expected = normalize_expected(expected, cwd);
+ let actual = normalize_actual(actual, cwd);
+ let e: Vec<_> = expected.lines().map(WildStr::new).collect();
+ let a: Vec<_> = actual.lines().map(WildStr::new).collect();
+ if e == a {
+ return Ok(());
+ }
+ let diff = diff::colored_diff(&e, &a);
+ bail!(
+ "{} did not match:\n\
+ {}\n\n\
+ other output:\n\
+ {}\n",
+ description,
+ diff,
+ other_output,
+ );
+}
+
+/// Convenience wrapper around [`match_exact`] which will panic on error.
+#[track_caller]
+pub fn assert_match_exact(expected: &str, actual: &str) {
+ if let Err(e) = match_exact(expected, actual, "", "", None) {
+ crate::panic_error("", e);
+ }
+}
+
+/// Checks that the given string contains the given lines, ignoring the order
+/// of the lines.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+pub fn match_unordered(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> {
+ let expected = normalize_expected(expected, cwd);
+ let actual = normalize_actual(actual, cwd);
+ let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect();
+ let mut a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect();
+ // match more-constrained lines first, although in theory we'll
+ // need some sort of recursive match here. This handles the case
+ // that you expect "a\n[..]b" and two lines are printed out,
+ // "ab\na", where technically we do match unordered but a naive
+ // search fails to find this. This simple sort at least gets the
+ // test suite to pass for now, but we may need to get more fancy
+ // if tests start failing again.
+ a.sort_by_key(|s| s.line.len());
+ let mut changes = Vec::new();
+ let mut a_index = 0;
+ let mut failure = false;
+
+ use crate::diff::Change;
+ for (e_i, e_line) in e.into_iter().enumerate() {
+ match a.iter().position(|a_line| e_line == *a_line) {
+ Some(index) => {
+ let a_line = a.remove(index);
+ changes.push(Change::Keep(e_i, index, a_line));
+ a_index += 1;
+ }
+ None => {
+ failure = true;
+ changes.push(Change::Remove(e_i, e_line));
+ }
+ }
+ }
+ for unmatched in a {
+ failure = true;
+ changes.push(Change::Add(a_index, unmatched));
+ a_index += 1;
+ }
+ if failure {
+ bail!(
+ "Expected lines did not match (ignoring order):\n{}\n",
+ diff::render_colored_changes(&changes)
+ );
+ } else {
+ Ok(())
+ }
+}
+
+/// Checks that the given string contains the given contiguous lines
+/// somewhere.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+pub fn match_contains(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> {
+ let expected = normalize_expected(expected, cwd);
+ let actual = normalize_actual(actual, cwd);
+ let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect();
+ let a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect();
+ if e.len() == 0 {
+ bail!("expected length must not be zero");
+ }
+ for window in a.windows(e.len()) {
+ if window == e {
+ return Ok(());
+ }
+ }
+ bail!(
+ "expected to find:\n\
+ {}\n\n\
+ did not find in output:\n\
+ {}",
+ expected,
+ actual
+ );
+}
+
+/// Checks that the given string does not contain the given contiguous lines
+/// anywhere.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+pub fn match_does_not_contain(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> {
+ if match_contains(expected, actual, cwd).is_ok() {
+ bail!(
+ "expected not to find:\n\
+ {}\n\n\
+ but found in output:\n\
+ {}",
+ expected,
+ actual
+ );
+ } else {
+ Ok(())
+ }
+}
+
+/// Checks that the given string contains the given contiguous lines
+/// somewhere, and should be repeated `number` times.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+pub fn match_contains_n(
+ expected: &str,
+ number: usize,
+ actual: &str,
+ cwd: Option<&Path>,
+) -> Result<()> {
+ let expected = normalize_expected(expected, cwd);
+ let actual = normalize_actual(actual, cwd);
+ let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect();
+ let a: Vec<_> = actual.lines().map(|line| WildStr::new(line)).collect();
+ if e.len() == 0 {
+ bail!("expected length must not be zero");
+ }
+ let matches = a.windows(e.len()).filter(|window| *window == e).count();
+ if matches == number {
+ Ok(())
+ } else {
+ bail!(
+ "expected to find {} occurrences of:\n\
+ {}\n\n\
+ but found {} matches in the output:\n\
+ {}",
+ number,
+ expected,
+ matches,
+ actual
+ )
+ }
+}
+
+/// Checks that the given string has a line that contains the given patterns,
+/// and that line also does not contain the `without` patterns.
+///
+/// See [Patterns](index.html#patterns) for more information on pattern matching.
+///
+/// See [`crate::Execs::with_stderr_line_without`] for an example and cautions
+/// against using.
+pub fn match_with_without(
+ actual: &str,
+ with: &[String],
+ without: &[String],
+ cwd: Option<&Path>,
+) -> Result<()> {
+ let actual = normalize_actual(actual, cwd);
+ let norm = |s: &String| format!("[..]{}[..]", normalize_expected(s, cwd));
+ let with: Vec<_> = with.iter().map(norm).collect();
+ let without: Vec<_> = without.iter().map(norm).collect();
+ let with_wild: Vec<_> = with.iter().map(|w| WildStr::new(w)).collect();
+ let without_wild: Vec<_> = without.iter().map(|w| WildStr::new(w)).collect();
+
+ let matches: Vec<_> = actual
+ .lines()
+ .map(WildStr::new)
+ .filter(|line| with_wild.iter().all(|with| with == line))
+ .filter(|line| !without_wild.iter().any(|without| without == line))
+ .collect();
+ match matches.len() {
+ 0 => bail!(
+ "Could not find expected line in output.\n\
+ With contents: {:?}\n\
+ Without contents: {:?}\n\
+ Actual stderr:\n\
+ {}\n",
+ with,
+ without,
+ actual
+ ),
+ 1 => Ok(()),
+ _ => bail!(
+ "Found multiple matching lines, but only expected one.\n\
+ With contents: {:?}\n\
+ Without contents: {:?}\n\
+ Matching lines:\n\
+ {}\n",
+ with,
+ without,
+ itertools::join(matches, "\n")
+ ),
+ }
+}
+
+/// Checks that the given string of JSON objects match the given set of
+/// expected JSON objects.
+///
+/// See [`crate::Execs::with_json`] for more details.
+pub fn match_json(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> {
+ let (exp_objs, act_objs) = collect_json_objects(expected, actual)?;
+ if exp_objs.len() != act_objs.len() {
+ bail!(
+ "expected {} json lines, got {}, stdout:\n{}",
+ exp_objs.len(),
+ act_objs.len(),
+ actual
+ );
+ }
+ for (exp_obj, act_obj) in exp_objs.iter().zip(act_objs) {
+ find_json_mismatch(exp_obj, &act_obj, cwd)?;
+ }
+ Ok(())
+}
+
+/// Checks that the given string of JSON objects match the given set of
+/// expected JSON objects, ignoring their order.
+///
+/// See [`crate::Execs::with_json_contains_unordered`] for more details and
+/// cautions when using.
+pub fn match_json_contains_unordered(
+ expected: &str,
+ actual: &str,
+ cwd: Option<&Path>,
+) -> Result<()> {
+ let (exp_objs, mut act_objs) = collect_json_objects(expected, actual)?;
+ for exp_obj in exp_objs {
+ match act_objs
+ .iter()
+ .position(|act_obj| find_json_mismatch(&exp_obj, act_obj, cwd).is_ok())
+ {
+ Some(index) => act_objs.remove(index),
+ None => {
+ bail!(
+ "Did not find expected JSON:\n\
+ {}\n\
+ Remaining available output:\n\
+ {}\n",
+ serde_json::to_string_pretty(&exp_obj).unwrap(),
+ itertools::join(
+ act_objs.iter().map(|o| serde_json::to_string(o).unwrap()),
+ "\n"
+ )
+ );
+ }
+ };
+ }
+ Ok(())
+}
+
+fn collect_json_objects(
+ expected: &str,
+ actual: &str,
+) -> Result<(Vec<serde_json::Value>, Vec<serde_json::Value>)> {
+ let expected_objs: Vec<_> = expected
+ .split("\n\n")
+ .map(|expect| {
+ expect
+ .parse()
+ .with_context(|| format!("failed to parse expected JSON object:\n{}", expect))
+ })
+ .collect::<Result<_>>()?;
+ let actual_objs: Vec<_> = actual
+ .lines()
+ .filter(|line| line.starts_with('{'))
+ .map(|line| {
+ line.parse()
+ .with_context(|| format!("failed to parse JSON object:\n{}", line))
+ })
+ .collect::<Result<_>>()?;
+ Ok((expected_objs, actual_objs))
+}
+
+/// Compares JSON object for approximate equality.
+/// You can use `[..]` wildcard in strings (useful for OS-dependent things such
+/// as paths). You can use a `"{...}"` string literal as a wildcard for
+/// arbitrary nested JSON (useful for parts of object emitted by other programs
+/// (e.g., rustc) rather than Cargo itself).
+pub fn find_json_mismatch(expected: &Value, actual: &Value, cwd: Option<&Path>) -> Result<()> {
+ match find_json_mismatch_r(expected, actual, cwd) {
+ Some((expected_part, actual_part)) => bail!(
+ "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n",
+ serde_json::to_string_pretty(expected).unwrap(),
+ serde_json::to_string_pretty(&actual).unwrap(),
+ serde_json::to_string_pretty(expected_part).unwrap(),
+ serde_json::to_string_pretty(actual_part).unwrap(),
+ ),
+ None => Ok(()),
+ }
+}
+
+fn find_json_mismatch_r<'a>(
+ expected: &'a Value,
+ actual: &'a Value,
+ cwd: Option<&Path>,
+) -> Option<(&'a Value, &'a Value)> {
+ use serde_json::Value::*;
+ match (expected, actual) {
+ (&Number(ref l), &Number(ref r)) if l == r => None,
+ (&Bool(l), &Bool(r)) if l == r => None,
+ (&String(ref l), _) if l == "{...}" => None,
+ (&String(ref l), &String(ref r)) => {
+ if match_exact(l, r, "", "", cwd).is_err() {
+ Some((expected, actual))
+ } else {
+ None
+ }
+ }
+ (&Array(ref l), &Array(ref r)) => {
+ if l.len() != r.len() {
+ return Some((expected, actual));
+ }
+
+ l.iter()
+ .zip(r.iter())
+ .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd))
+ .next()
+ }
+ (&Object(ref l), &Object(ref r)) => {
+ let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k));
+ if !same_keys {
+ return Some((expected, actual));
+ }
+
+ l.values()
+ .zip(r.values())
+ .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd))
+ .next()
+ }
+ (&Null, &Null) => None,
+ // Magic string literal `"{...}"` acts as wildcard for any sub-JSON.
+ _ => Some((expected, actual)),
+ }
+}
+
+/// A single line string that supports `[..]` wildcard matching.
+pub struct WildStr<'a> {
+ has_meta: bool,
+ line: &'a str,
+}
+
+impl<'a> WildStr<'a> {
+ pub fn new(line: &'a str) -> WildStr<'a> {
+ WildStr {
+ has_meta: line.contains("[..]"),
+ line,
+ }
+ }
+}
+
+impl<'a> PartialEq for WildStr<'a> {
+ fn eq(&self, other: &Self) -> bool {
+ match (self.has_meta, other.has_meta) {
+ (false, false) => self.line == other.line,
+ (true, false) => meta_cmp(self.line, other.line),
+ (false, true) => meta_cmp(other.line, self.line),
+ (true, true) => panic!("both lines cannot have [..]"),
+ }
+ }
+}
+
+fn meta_cmp(a: &str, mut b: &str) -> bool {
+ for (i, part) in a.split("[..]").enumerate() {
+ match b.find(part) {
+ Some(j) => {
+ if i == 0 && j != 0 {
+ return false;
+ }
+ b = &b[j + part.len()..];
+ }
+ None => return false,
+ }
+ }
+ b.is_empty() || a.ends_with("[..]")
+}
+
+impl fmt::Display for WildStr<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.line)
+ }
+}
+
+impl fmt::Debug for WildStr<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{:?}", self.line)
+ }
+}
+
+#[test]
+fn wild_str_cmp() {
+ for (a, b) in &[
+ ("a b", "a b"),
+ ("a[..]b", "a b"),
+ ("a[..]", "a b"),
+ ("[..]", "a b"),
+ ("[..]b", "a b"),
+ ] {
+ assert_eq!(WildStr::new(a), WildStr::new(b));
+ }
+ for (a, b) in &[("[..]b", "c"), ("b", "c"), ("b", "cb")] {
+ assert_ne!(WildStr::new(a), WildStr::new(b));
+ }
+}
+
+#[test]
+fn dirty_msvc() {
+ let case = |expected: &str, wild: &str, msvc: bool| {
+ assert_eq!(expected, &replace_dirty_msvc_impl(wild, msvc));
+ };
+
+ // no replacements
+ case("aa", "aa", false);
+ case("aa", "aa", true);
+
+ // with replacements
+ case(
+ "\
+[DIRTY] a",
+ "\
+[DIRTY-MSVC] a",
+ true,
+ );
+ case(
+ "",
+ "\
+[DIRTY-MSVC] a",
+ false,
+ );
+ case(
+ "\
+[DIRTY] a
+[COMPILING] a",
+ "\
+[DIRTY-MSVC] a
+[COMPILING] a",
+ true,
+ );
+ case(
+ "\
+[COMPILING] a",
+ "\
+[DIRTY-MSVC] a
+[COMPILING] a",
+ false,
+ );
+
+ // test trailing newline behavior
+ case(
+ "\
+A
+B
+", "\
+A
+B
+", true,
+ );
+
+ case(
+ "\
+A
+B
+", "\
+A
+B
+", false,
+ );
+
+ case(
+ "\
+A
+B", "\
+A
+B", true,
+ );
+
+ case(
+ "\
+A
+B", "\
+A
+B", false,
+ );
+
+ case(
+ "\
+[DIRTY] a
+",
+ "\
+[DIRTY-MSVC] a
+",
+ true,
+ );
+ case(
+ "\n",
+ "\
+[DIRTY-MSVC] a
+",
+ false,
+ );
+
+ case(
+ "\
+[DIRTY] a",
+ "\
+[DIRTY-MSVC] a",
+ true,
+ );
+ case(
+ "",
+ "\
+[DIRTY-MSVC] a",
+ false,
+ );
+}
diff --git a/crates/cargo-test-support/src/containers.rs b/crates/cargo-test-support/src/containers.rs
new file mode 100644
index 0000000..17040d8
--- /dev/null
+++ b/crates/cargo-test-support/src/containers.rs
@@ -0,0 +1,285 @@
+//! Support for testing using Docker containers.
+//!
+//! The [`Container`] type is a builder for configuring a container to run.
+//! After you call `launch`, you can use the [`ContainerHandle`] to interact
+//! with the running container.
+//!
+//! Tests using containers must use `#[cargo_test(container_test)]` to disable
+//! them unless the CARGO_CONTAINER_TESTS environment variable is set.
+
+use cargo_util::ProcessBuilder;
+use std::collections::HashMap;
+use std::io::Read;
+use std::path::PathBuf;
+use std::process::Command;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Mutex;
+use tar::Header;
+
+/// A builder for configuring a container to run.
+pub struct Container {
+ /// The host directory that forms the basis of the Docker image.
+ build_context: PathBuf,
+ /// Files to copy over to the image.
+ files: Vec<MkFile>,
+}
+
+/// A handle to a running container.
+///
+/// You can use this to interact with the container.
+pub struct ContainerHandle {
+ /// The name of the container.
+ name: String,
+ /// The IP address of the container.
+ ///
+ /// NOTE: This is currently unused, but may be useful so I left it in.
+ /// This can only be used on Linux. Docker on macOS and Windows doesn't
+ /// allow direct connections to the container.
+ pub ip_address: String,
+ /// Port mappings of container_port to host_port for ports exposed via EXPOSE.
+ pub port_mappings: HashMap<u16, u16>,
+}
+
+impl Container {
+ pub fn new(context_dir: &str) -> Container {
+ assert!(std::env::var_os("CARGO_CONTAINER_TESTS").is_some());
+ let mut build_context = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+ build_context.push("containers");
+ build_context.push(context_dir);
+ Container {
+ build_context,
+ files: Vec::new(),
+ }
+ }
+
+ /// Adds a file to be copied into the container.
+ pub fn file(mut self, file: MkFile) -> Self {
+ self.files.push(file);
+ self
+ }
+
+ /// Starts the container.
+ pub fn launch(mut self) -> ContainerHandle {
+ static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
+
+ let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
+ let name = format!("cargo_test_{id}");
+ remove_if_exists(&name);
+ self.create_container(&name);
+ self.copy_files(&name);
+ self.start_container(&name);
+ let info = self.container_inspect(&name);
+ let ip_address = if cfg!(target_os = "linux") {
+ info[0]["NetworkSettings"]["IPAddress"]
+ .as_str()
+ .unwrap()
+ .to_string()
+ } else {
+ // macOS and Windows can't make direct connections to the
+ // container. It only works through exposed ports or mapped ports.
+ "127.0.0.1".to_string()
+ };
+ let port_mappings = self.port_mappings(&info);
+ self.wait_till_ready(&port_mappings);
+
+ ContainerHandle {
+ name,
+ ip_address,
+ port_mappings,
+ }
+ }
+
+ fn create_container(&self, name: &str) {
+ static BUILD_LOCK: Mutex<()> = Mutex::new(());
+
+ let image_base = self.build_context.file_name().unwrap();
+ let image_name = format!("cargo-test-{}", image_base.to_str().unwrap());
+ let _lock = BUILD_LOCK.lock().unwrap();
+ ProcessBuilder::new("docker")
+ .args(&["build", "--tag", image_name.as_str()])
+ .arg(&self.build_context)
+ .exec_with_output()
+ .unwrap();
+
+ ProcessBuilder::new("docker")
+ .args(&[
+ "container",
+ "create",
+ "--publish-all",
+ "--rm",
+ "--name",
+ name,
+ ])
+ .arg(image_name)
+ .exec_with_output()
+ .unwrap();
+ }
+
+ fn copy_files(&mut self, name: &str) {
+ if self.files.is_empty() {
+ return;
+ }
+ let mut ar = tar::Builder::new(Vec::new());
+ let files = std::mem::replace(&mut self.files, Vec::new());
+ for mut file in files {
+ ar.append_data(&mut file.header, &file.path, file.contents.as_slice())
+ .unwrap();
+ }
+ let ar = ar.into_inner().unwrap();
+ ProcessBuilder::new("docker")
+ .args(&["cp", "-"])
+ .arg(format!("{name}:/"))
+ .stdin(ar)
+ .exec_with_output()
+ .unwrap();
+ }
+
+ fn start_container(&self, name: &str) {
+ ProcessBuilder::new("docker")
+ .args(&["container", "start"])
+ .arg(name)
+ .exec_with_output()
+ .unwrap();
+ }
+
+ fn container_inspect(&self, name: &str) -> serde_json::Value {
+ let output = ProcessBuilder::new("docker")
+ .args(&["inspect", name])
+ .exec_with_output()
+ .unwrap();
+ serde_json::from_slice(&output.stdout).unwrap()
+ }
+
+ /// Returns the mapping of container_port->host_port for ports that were
+ /// exposed with EXPOSE.
+ fn port_mappings(&self, info: &serde_json::Value) -> HashMap<u16, u16> {
+ info[0]["NetworkSettings"]["Ports"]
+ .as_object()
+ .unwrap()
+ .iter()
+ .map(|(key, value)| {
+ let key = key
+ .strip_suffix("/tcp")
+ .expect("expected TCP only ports")
+ .parse()
+ .unwrap();
+ let values = value.as_array().unwrap();
+ let value = values
+ .iter()
+ .find(|value| value["HostIp"].as_str().unwrap() == "0.0.0.0")
+ .expect("expected localhost IP");
+ let host_port = value["HostPort"].as_str().unwrap().parse().unwrap();
+ (key, host_port)
+ })
+ .collect()
+ }
+
+ fn wait_till_ready(&self, port_mappings: &HashMap<u16, u16>) {
+ for port in port_mappings.values() {
+ let mut ok = false;
+ for _ in 0..30 {
+ match std::net::TcpStream::connect(format!("127.0.0.1:{port}")) {
+ Ok(_) => {
+ ok = true;
+ break;
+ }
+ Err(e) => {
+ if e.kind() != std::io::ErrorKind::ConnectionRefused {
+ panic!("unexpected localhost connection error: {e:?}");
+ }
+ std::thread::sleep(std::time::Duration::new(1, 0));
+ }
+ }
+ }
+ if !ok {
+ panic!("no listener on localhost port {port}");
+ }
+ }
+ }
+}
+
+impl ContainerHandle {
+ /// Executes a program inside a running container.
+ pub fn exec(&self, args: &[&str]) -> std::process::Output {
+ ProcessBuilder::new("docker")
+ .args(&["container", "exec", &self.name])
+ .args(args)
+ .exec_with_output()
+ .unwrap()
+ }
+
+ /// Returns the contents of a file inside the container.
+ pub fn read_file(&self, path: &str) -> String {
+ let output = ProcessBuilder::new("docker")
+ .args(&["cp", &format!("{}:{}", self.name, path), "-"])
+ .exec_with_output()
+ .unwrap();
+ let mut ar = tar::Archive::new(output.stdout.as_slice());
+ let mut entry = ar.entries().unwrap().next().unwrap().unwrap();
+ let mut contents = String::new();
+ entry.read_to_string(&mut contents).unwrap();
+ contents
+ }
+}
+
+impl Drop for ContainerHandle {
+ fn drop(&mut self) {
+ // To help with debugging, this will keep the container alive.
+ if std::env::var_os("CARGO_CONTAINER_TEST_KEEP").is_some() {
+ return;
+ }
+ remove_if_exists(&self.name);
+ }
+}
+
+fn remove_if_exists(name: &str) {
+ if let Err(e) = Command::new("docker")
+ .args(&["container", "rm", "--force", name])
+ .output()
+ {
+ panic!("failed to run docker: {e}");
+ }
+}
+
+/// Builder for configuring a file to copy into a container.
+pub struct MkFile {
+ path: String,
+ contents: Vec<u8>,
+ header: Header,
+}
+
+impl MkFile {
+ /// Defines a file to add to the container.
+ ///
+ /// This should be passed to `Container::file`.
+ ///
+ /// The path is the path inside the container to create the file.
+ pub fn path(path: &str) -> MkFile {
+ MkFile {
+ path: path.to_string(),
+ contents: Vec::new(),
+ header: Header::new_gnu(),
+ }
+ }
+
+ pub fn contents(mut self, contents: impl Into<Vec<u8>>) -> Self {
+ self.contents = contents.into();
+ self.header.set_size(self.contents.len() as u64);
+ self
+ }
+
+ pub fn mode(mut self, mode: u32) -> Self {
+ self.header.set_mode(mode);
+ self
+ }
+
+ pub fn uid(mut self, uid: u64) -> Self {
+ self.header.set_uid(uid);
+ self
+ }
+
+ pub fn gid(mut self, gid: u64) -> Self {
+ self.header.set_gid(gid);
+ self
+ }
+}
diff --git a/crates/cargo-test-support/src/cross_compile.rs b/crates/cargo-test-support/src/cross_compile.rs
new file mode 100644
index 0000000..a2daf88
--- /dev/null
+++ b/crates/cargo-test-support/src/cross_compile.rs
@@ -0,0 +1,264 @@
+//! Support for cross-compile tests with the `--target` flag.
+//!
+//! Note that cross-testing is very limited. You need to install the
+//! "alternate" target to the host (32-bit for 64-bit hosts or vice-versa).
+//!
+//! Set CFG_DISABLE_CROSS_TESTS=1 environment variable to disable these tests
+//! if you are unable to use the alternate target. Unfortunately 32-bit
+//! support on macOS is going away, so macOS users are out of luck.
+//!
+//! These tests are all disabled on rust-lang/rust's CI, but run in Cargo's CI.
+
+use crate::{basic_manifest, main_file, project};
+use cargo_util::ProcessError;
+use std::env;
+use std::fmt::Write;
+use std::process::{Command, Output};
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::Once;
+
+/// Whether or not the resulting cross binaries can run on the host.
+static CAN_RUN_ON_HOST: AtomicBool = AtomicBool::new(false);
+
+pub fn disabled() -> bool {
+ // First, disable if requested.
+ match env::var("CFG_DISABLE_CROSS_TESTS") {
+ Ok(ref s) if *s == "1" => return true,
+ _ => {}
+ }
+
+ // Cross tests are only tested to work on macos, linux, and MSVC windows.
+ if !(cfg!(target_os = "macos") || cfg!(target_os = "linux") || cfg!(target_env = "msvc")) {
+ return true;
+ }
+
+ // It's not particularly common to have a cross-compilation setup, so
+ // try to detect that before we fail a bunch of tests through no fault
+ // of the user.
+ static CAN_BUILD_CROSS_TESTS: AtomicBool = AtomicBool::new(false);
+ static CHECK: Once = Once::new();
+
+ let cross_target = alternate();
+
+ let run_cross_test = || -> anyhow::Result<Output> {
+ let p = project()
+ .at("cross_test")
+ .file("Cargo.toml", &basic_manifest("cross_test", "1.0.0"))
+ .file("src/main.rs", &main_file(r#""testing!""#, &[]))
+ .build();
+
+ let build_result = p
+ .cargo("build --target")
+ .arg(&cross_target)
+ .exec_with_output();
+
+ if build_result.is_ok() {
+ CAN_BUILD_CROSS_TESTS.store(true, Ordering::SeqCst);
+ }
+
+ let result = p
+ .cargo("run --target")
+ .arg(&cross_target)
+ .exec_with_output();
+
+ if result.is_ok() {
+ CAN_RUN_ON_HOST.store(true, Ordering::SeqCst);
+ }
+ build_result
+ };
+
+ CHECK.call_once(|| {
+ drop(run_cross_test());
+ });
+
+ if CAN_BUILD_CROSS_TESTS.load(Ordering::SeqCst) {
+ // We were able to compile a simple project, so the user has the
+ // necessary `std::` bits installed. Therefore, tests should not
+ // be disabled.
+ return false;
+ }
+
+ // We can't compile a simple cross project. We want to warn the user
+ // by failing a single test and having the remainder of the cross tests
+ // pass. We don't use `std::sync::Once` here because panicking inside its
+ // `call_once` method would poison the `Once` instance, which is not what
+ // we want.
+ static HAVE_WARNED: AtomicBool = AtomicBool::new(false);
+
+ if HAVE_WARNED.swap(true, Ordering::SeqCst) {
+ // We are some other test and somebody else is handling the warning.
+ // Just disable the current test.
+ return true;
+ }
+
+ // We are responsible for warning the user, which we do by panicking.
+ let mut message = format!(
+ "
+Cannot cross compile to {}.
+
+This failure can be safely ignored. If you would prefer to not see this
+failure, you can set the environment variable CFG_DISABLE_CROSS_TESTS to \"1\".
+
+Alternatively, you can install the necessary libraries to enable cross
+compilation tests. Cross compilation tests depend on your host platform.
+",
+ cross_target
+ );
+
+ if cfg!(target_os = "linux") {
+ message.push_str(
+ "
+Linux cross tests target i686-unknown-linux-gnu, which requires the ability to
+build and run 32-bit targets. This requires the 32-bit libraries to be
+installed. For example, on Ubuntu, run `sudo apt install gcc-multilib` to
+install the necessary libraries.
+",
+ );
+ } else if cfg!(all(target_os = "macos", target_arch = "aarch64")) {
+ message.push_str(
+ "
+macOS on aarch64 cross tests to target x86_64-apple-darwin.
+This should be natively supported via Xcode, nothing additional besides the
+rustup target should be needed.
+",
+ );
+ } else if cfg!(target_os = "macos") {
+ message.push_str(
+ "
+macOS on x86_64 cross tests to target x86_64-apple-ios, which requires the iOS
+SDK to be installed. This should be included with Xcode automatically. If you
+are using the Xcode command line tools, you'll need to install the full Xcode
+app (from the Apple App Store), and switch to it with this command:
+
+ sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer
+
+Some cross-tests want to *run* the executables on the host. These tests will
+be ignored if this is not possible. On macOS, this means you need an iOS
+simulator installed to run these tests. To install a simulator, open Xcode, go
+to preferences > Components, and download the latest iOS simulator.
+",
+ );
+ } else if cfg!(target_os = "windows") {
+ message.push_str(
+ "
+Windows cross tests target i686-pc-windows-msvc, which requires the ability
+to build and run 32-bit targets. This should work automatically if you have
+properly installed Visual Studio build tools.
+",
+ );
+ } else {
+ // The check at the top should prevent this.
+ panic!("platform should have been skipped");
+ }
+
+ let rustup_available = Command::new("rustup").output().is_ok();
+ if rustup_available {
+ write!(
+ message,
+ "
+Make sure that the appropriate `rustc` target is installed with rustup:
+
+ rustup target add {}
+",
+ cross_target
+ )
+ .unwrap();
+ } else {
+ write!(
+ message,
+ "
+rustup does not appear to be installed. Make sure that the appropriate
+`rustc` target is installed for the target `{}`.
+",
+ cross_target
+ )
+ .unwrap();
+ }
+
+ // Show the actual error message.
+ match run_cross_test() {
+ Ok(_) => message.push_str("\nUh oh, second run succeeded?\n"),
+ Err(err) => match err.downcast_ref::<ProcessError>() {
+ Some(proc_err) => write!(message, "\nTest error: {}\n", proc_err).unwrap(),
+ None => write!(message, "\nUnexpected non-process error: {}\n", err).unwrap(),
+ },
+ }
+
+ panic!("{}", message);
+}
+
+/// The arch triple of the test-running host.
+pub fn native() -> &'static str {
+ env!("NATIVE_ARCH")
+}
+
+pub fn native_arch() -> &'static str {
+ match native()
+ .split("-")
+ .next()
+ .expect("Target triple has unexpected format")
+ {
+ "x86_64" => "x86_64",
+ "aarch64" => "aarch64",
+ "i686" => "x86",
+ _ => panic!("This test should be gated on cross_compile::disabled."),
+ }
+}
+
+/// The alternate target-triple to build with.
+///
+/// Only use this function on tests that check `cross_compile::disabled`.
+pub fn alternate() -> &'static str {
+ if cfg!(all(target_os = "macos", target_arch = "aarch64")) {
+ "x86_64-apple-darwin"
+ } else if cfg!(target_os = "macos") {
+ "x86_64-apple-ios"
+ } else if cfg!(target_os = "linux") {
+ "i686-unknown-linux-gnu"
+ } else if cfg!(all(target_os = "windows", target_env = "msvc")) {
+ "i686-pc-windows-msvc"
+ } else if cfg!(all(target_os = "windows", target_env = "gnu")) {
+ "i686-pc-windows-gnu"
+ } else {
+ panic!("This test should be gated on cross_compile::disabled.");
+ }
+}
+
+pub fn alternate_arch() -> &'static str {
+ if cfg!(target_os = "macos") {
+ "x86_64"
+ } else {
+ "x86"
+ }
+}
+
+/// A target-triple that is neither the host nor the target.
+///
+/// Rustc may not work with it, and that's alright: apart from being a
+/// valid target triple, it is supposed to be used only as a
+/// placeholder for targets that should not be considered.
+pub fn unused() -> &'static str {
+ "wasm32-unknown-unknown"
+}
+
+/// Whether or not the host can run cross-compiled executables.
+pub fn can_run_on_host() -> bool {
+ if disabled() {
+ return false;
+ }
+ // macOS is currently configured to cross-compile to x86_64-apple-ios,
+ // which requires a simulator to run. Azure's CI image appears to have the
+ // SDK installed, but is not configured to launch iOS images with a
+ // simulator.
+ if cfg!(target_os = "macos") {
+ if CAN_RUN_ON_HOST.load(Ordering::SeqCst) {
+ return true;
+ } else {
+ println!("Note: Cannot run on host, skipping.");
+ return false;
+ }
+ } else {
+ assert!(CAN_RUN_ON_HOST.load(Ordering::SeqCst));
+ return true;
+ }
+}
diff --git a/crates/cargo-test-support/src/diff.rs b/crates/cargo-test-support/src/diff.rs
new file mode 100644
index 0000000..f3b283b
--- /dev/null
+++ b/crates/cargo-test-support/src/diff.rs
@@ -0,0 +1,174 @@
+//! A simple Myers diff implementation.
+//!
+//! This focuses on being short and simple, at the expense of being
+//! inefficient. A key characteristic here is that this supports cargotest's
+//! `[..]` wildcard matching. That means things like hashing can't be used.
+//! Since Cargo's output tends to be small, this should be sufficient.
+
+use std::fmt;
+use std::io::Write;
+use termcolor::{Ansi, Color, ColorSpec, NoColor, WriteColor};
+
+/// A single line change to be applied to the original.
+#[derive(Debug, Eq, PartialEq)]
+pub enum Change<T> {
+ Add(usize, T),
+ Remove(usize, T),
+ Keep(usize, usize, T),
+}
+
+pub fn diff<'a, T>(a: &'a [T], b: &'a [T]) -> Vec<Change<&'a T>>
+where
+ T: PartialEq,
+{
+ if a.is_empty() && b.is_empty() {
+ return vec![];
+ }
+ let mut diff = vec![];
+ for (prev_x, prev_y, x, y) in backtrack(&a, &b) {
+ if x == prev_x {
+ diff.push(Change::Add(prev_y + 1, &b[prev_y]));
+ } else if y == prev_y {
+ diff.push(Change::Remove(prev_x + 1, &a[prev_x]));
+ } else {
+ diff.push(Change::Keep(prev_x + 1, prev_y + 1, &a[prev_x]));
+ }
+ }
+ diff.reverse();
+ diff
+}
+
+fn shortest_edit<T>(a: &[T], b: &[T]) -> Vec<Vec<usize>>
+where
+ T: PartialEq,
+{
+ let max = a.len() + b.len();
+ let mut v = vec![0; 2 * max + 1];
+ let mut trace = vec![];
+ for d in 0..=max {
+ trace.push(v.clone());
+ for k in (0..=(2 * d)).step_by(2) {
+ let mut x = if k == 0 || (k != 2 * d && v[max - d + k - 1] < v[max - d + k + 1]) {
+ // Move down
+ v[max - d + k + 1]
+ } else {
+ // Move right
+ v[max - d + k - 1] + 1
+ };
+ let mut y = x + d - k;
+ // Step diagonally as far as possible.
+ while x < a.len() && y < b.len() && a[x] == b[y] {
+ x += 1;
+ y += 1;
+ }
+ v[max - d + k] = x;
+ // Return if reached the bottom-right position.
+ if x >= a.len() && y >= b.len() {
+ return trace;
+ }
+ }
+ }
+ panic!("finished without hitting end?");
+}
+
+fn backtrack<T>(a: &[T], b: &[T]) -> Vec<(usize, usize, usize, usize)>
+where
+ T: PartialEq,
+{
+ let mut result = vec![];
+ let mut x = a.len();
+ let mut y = b.len();
+ let max = x + y;
+ for (d, v) in shortest_edit(a, b).iter().enumerate().rev() {
+ let k = x + d - y;
+ let prev_k = if k == 0 || (k != 2 * d && v[max - d + k - 1] < v[max - d + k + 1]) {
+ k + 1
+ } else {
+ k - 1
+ };
+ let prev_x = v[max - d + prev_k];
+ let prev_y = (prev_x + d).saturating_sub(prev_k);
+ while x > prev_x && y > prev_y {
+ result.push((x - 1, y - 1, x, y));
+ x -= 1;
+ y -= 1;
+ }
+ if d > 0 {
+ result.push((prev_x, prev_y, x, y));
+ }
+ x = prev_x;
+ y = prev_y;
+ }
+ return result;
+}
+
+pub fn colored_diff<'a, T>(a: &'a [T], b: &'a [T]) -> String
+where
+ T: PartialEq + fmt::Display,
+{
+ let changes = diff(a, b);
+ render_colored_changes(&changes)
+}
+
+pub fn render_colored_changes<T: fmt::Display>(changes: &[Change<T>]) -> String {
+ // termcolor is not very ergonomic, but I don't want to bring in another dependency.
+ let mut red = ColorSpec::new();
+ red.set_fg(Some(Color::Red));
+ let mut green = ColorSpec::new();
+ green.set_fg(Some(Color::Green));
+ let mut dim = ColorSpec::new();
+ dim.set_dimmed(true);
+ let mut v = Vec::new();
+ let mut result: Box<dyn WriteColor> = if crate::is_ci() {
+ // Don't use color on CI. Even though GitHub can display colors, it
+ // makes reading the raw logs more difficult.
+ Box::new(NoColor::new(&mut v))
+ } else {
+ Box::new(Ansi::new(&mut v))
+ };
+
+ for change in changes {
+ let (nums, sign, color, text) = match change {
+ Change::Add(i, s) => (format!(" {:<4} ", i), '+', &green, s),
+ Change::Remove(i, s) => (format!("{:<4} ", i), '-', &red, s),
+ Change::Keep(x, y, s) => (format!("{:<4}{:<4} ", x, y), ' ', &dim, s),
+ };
+ result.set_color(&dim).unwrap();
+ write!(result, "{}", nums).unwrap();
+ let mut bold = color.clone();
+ bold.set_bold(true);
+ result.set_color(&bold).unwrap();
+ write!(result, "{}", sign).unwrap();
+ result.reset().unwrap();
+ result.set_color(&color).unwrap();
+ write!(result, "{}", text).unwrap();
+ result.reset().unwrap();
+ writeln!(result).unwrap();
+ }
+ drop(result);
+ String::from_utf8(v).unwrap()
+}
+
+#[cfg(test)]
+pub fn compare(a: &str, b: &str) {
+ let a: Vec<_> = a.chars().collect();
+ let b: Vec<_> = b.chars().collect();
+ let changes = diff(&a, &b);
+ let mut result = vec![];
+ for change in changes {
+ match change {
+ Change::Add(_, s) => result.push(*s),
+ Change::Remove(_, _s) => {}
+ Change::Keep(_, _, s) => result.push(*s),
+ }
+ }
+ assert_eq!(b, result);
+}
+
+#[test]
+fn basic_tests() {
+ compare("", "");
+ compare("A", "");
+ compare("", "B");
+ compare("ABCABBA", "CBABAC");
+}
diff --git a/crates/cargo-test-support/src/git.rs b/crates/cargo-test-support/src/git.rs
new file mode 100644
index 0000000..18c4646
--- /dev/null
+++ b/crates/cargo-test-support/src/git.rs
@@ -0,0 +1,249 @@
+/*
+# Git Testing Support
+
+## Creating a git dependency
+`git::new()` is an easy way to create a new git repository containing a
+project that you can then use as a dependency. It will automatically add all
+the files you specify in the project and commit them to the repository.
+Example:
+
+```
+let git_project = git::new("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("dep1", "1.0.0"))
+ .file("src/lib.rs", r#"pub fn f() { println!("hi!"); } "#)
+});
+
+// Use the `url()` method to get the file url to the new repository.
+let p = project()
+ .file("Cargo.toml", &format!(r#"
+ [package]
+ name = "a"
+ version = "1.0.0"
+
+ [dependencies]
+ dep1 = {{ git = '{}' }}
+ "#, git_project.url()))
+ .file("src/lib.rs", "extern crate dep1;")
+ .build();
+```
+
+## Manually creating repositories
+`git::repo()` can be used to create a `RepoBuilder` which provides a way of
+adding files to a blank repository and committing them.
+
+If you want to then manipulate the repository (such as adding new files or
+tags), you can use `git2::Repository::open()` to open the repository and then
+use some of the helper functions in this file to interact with the repository.
+
+*/
+
+use crate::{path2url, project, Project, ProjectBuilder};
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::sync::Once;
+use url::Url;
+
+#[must_use]
+pub struct RepoBuilder {
+ repo: git2::Repository,
+ files: Vec<PathBuf>,
+}
+
+pub struct Repository(git2::Repository);
+
+/// Create a `RepoBuilder` to build a new git repository.
+///
+/// Call `build()` to finalize and create the repository.
+pub fn repo(p: &Path) -> RepoBuilder {
+ RepoBuilder::init(p)
+}
+
+impl RepoBuilder {
+ pub fn init(p: &Path) -> RepoBuilder {
+ t!(fs::create_dir_all(p.parent().unwrap()));
+ let repo = init(p);
+ RepoBuilder {
+ repo,
+ files: Vec::new(),
+ }
+ }
+
+ /// Add a file to the repository.
+ pub fn file(self, path: &str, contents: &str) -> RepoBuilder {
+ let mut me = self.nocommit_file(path, contents);
+ me.files.push(PathBuf::from(path));
+ me
+ }
+
+ /// Add a file that will be left in the working directory, but not added
+ /// to the repository.
+ pub fn nocommit_file(self, path: &str, contents: &str) -> RepoBuilder {
+ let dst = self.repo.workdir().unwrap().join(path);
+ t!(fs::create_dir_all(dst.parent().unwrap()));
+ t!(fs::write(&dst, contents));
+ self
+ }
+
+ /// Create the repository and commit the new files.
+ pub fn build(self) -> Repository {
+ {
+ let mut index = t!(self.repo.index());
+ for file in self.files.iter() {
+ t!(index.add_path(file));
+ }
+ t!(index.write());
+ let id = t!(index.write_tree());
+ let tree = t!(self.repo.find_tree(id));
+ let sig = t!(self.repo.signature());
+ t!(self
+ .repo
+ .commit(Some("HEAD"), &sig, &sig, "Initial commit", &tree, &[]));
+ }
+ let RepoBuilder { repo, .. } = self;
+ Repository(repo)
+ }
+}
+
+impl Repository {
+ pub fn root(&self) -> &Path {
+ self.0.workdir().unwrap()
+ }
+
+ pub fn url(&self) -> Url {
+ path2url(self.0.workdir().unwrap().to_path_buf())
+ }
+
+ pub fn revparse_head(&self) -> String {
+ self.0
+ .revparse_single("HEAD")
+ .expect("revparse HEAD")
+ .id()
+ .to_string()
+ }
+}
+
+/// Initialize a new repository at the given path.
+pub fn init(path: &Path) -> git2::Repository {
+ default_search_path();
+ let repo = t!(git2::Repository::init(path));
+ default_repo_cfg(&repo);
+ repo
+}
+
+fn default_search_path() {
+ use crate::paths::global_root;
+ use git2::{opts::set_search_path, ConfigLevel};
+
+ static INIT: Once = Once::new();
+ INIT.call_once(|| unsafe {
+ let path = global_root().join("blank_git_search_path");
+ t!(set_search_path(ConfigLevel::System, &path));
+ t!(set_search_path(ConfigLevel::Global, &path));
+ t!(set_search_path(ConfigLevel::XDG, &path));
+ t!(set_search_path(ConfigLevel::ProgramData, &path));
+ })
+}
+
+fn default_repo_cfg(repo: &git2::Repository) {
+ let mut cfg = t!(repo.config());
+ t!(cfg.set_str("user.email", "foo@bar.com"));
+ t!(cfg.set_str("user.name", "Foo Bar"));
+}
+
+/// Create a new git repository with a project.
+pub fn new<F>(name: &str, callback: F) -> Project
+where
+ F: FnOnce(ProjectBuilder) -> ProjectBuilder,
+{
+ new_repo(name, callback).0
+}
+
+/// Create a new git repository with a project.
+/// Returns both the Project and the git Repository.
+pub fn new_repo<F>(name: &str, callback: F) -> (Project, git2::Repository)
+where
+ F: FnOnce(ProjectBuilder) -> ProjectBuilder,
+{
+ let mut git_project = project().at(name);
+ git_project = callback(git_project);
+ let git_project = git_project.build();
+
+ let repo = init(&git_project.root());
+ add(&repo);
+ commit(&repo);
+ (git_project, repo)
+}
+
+/// Add all files in the working directory to the git index.
+pub fn add(repo: &git2::Repository) {
+ // FIXME(libgit2/libgit2#2514): apparently, `add_all` will add all submodules
+ // as well, and then fail because they're directories. As a stop-gap, we just
+ // ignore all submodules.
+ let mut s = t!(repo.submodules());
+ for submodule in s.iter_mut() {
+ t!(submodule.add_to_index(false));
+ }
+ let mut index = t!(repo.index());
+ t!(index.add_all(
+ ["*"].iter(),
+ git2::IndexAddOption::DEFAULT,
+ Some(
+ &mut (|a, _b| if s.iter().any(|s| a.starts_with(s.path())) {
+ 1
+ } else {
+ 0
+ })
+ )
+ ));
+ t!(index.write());
+}
+
+/// Add a git submodule to the repository.
+pub fn add_submodule<'a>(
+ repo: &'a git2::Repository,
+ url: &str,
+ path: &Path,
+) -> git2::Submodule<'a> {
+ let path = path.to_str().unwrap().replace(r"\", "/");
+ let mut s = t!(repo.submodule(url, Path::new(&path), false));
+ let subrepo = t!(s.open());
+ default_repo_cfg(&subrepo);
+ t!(subrepo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*"));
+ let mut origin = t!(subrepo.find_remote("origin"));
+ t!(origin.fetch(&Vec::<String>::new(), None, None));
+ t!(subrepo.checkout_head(None));
+ t!(s.add_finalize());
+ s
+}
+
+/// Commit changes to the git repository.
+pub fn commit(repo: &git2::Repository) -> git2::Oid {
+ let tree_id = t!(t!(repo.index()).write_tree());
+ let sig = t!(repo.signature());
+ let mut parents = Vec::new();
+ if let Some(parent) = repo.head().ok().map(|h| h.target().unwrap()) {
+ parents.push(t!(repo.find_commit(parent)))
+ }
+ let parents = parents.iter().collect::<Vec<_>>();
+ t!(repo.commit(
+ Some("HEAD"),
+ &sig,
+ &sig,
+ "test",
+ &t!(repo.find_tree(tree_id)),
+ &parents
+ ))
+}
+
+/// Create a new tag in the git repository.
+pub fn tag(repo: &git2::Repository, name: &str) {
+ let head = repo.head().unwrap().target().unwrap();
+ t!(repo.tag(
+ name,
+ &t!(repo.find_object(head, None)),
+ &t!(repo.signature()),
+ "make a new tag",
+ false
+ ));
+}
diff --git a/crates/cargo-test-support/src/install.rs b/crates/cargo-test-support/src/install.rs
new file mode 100644
index 0000000..478b482
--- /dev/null
+++ b/crates/cargo-test-support/src/install.rs
@@ -0,0 +1,29 @@
+use crate::paths;
+use std::env::consts::EXE_SUFFIX;
+use std::path::{Path, PathBuf};
+
+/// Used by `cargo install` tests to assert an executable binary
+/// has been installed. Example usage:
+///
+/// assert_has_installed_exe(cargo_home(), "foo");
+#[track_caller]
+pub fn assert_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {
+ assert!(check_has_installed_exe(path, name));
+}
+
+#[track_caller]
+pub fn assert_has_not_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {
+ assert!(!check_has_installed_exe(path, name));
+}
+
+fn check_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) -> bool {
+ path.as_ref().join("bin").join(exe(name)).is_file()
+}
+
+pub fn cargo_home() -> PathBuf {
+ paths::home().join(".cargo")
+}
+
+pub fn exe(name: &str) -> String {
+ format!("{}{}", name, EXE_SUFFIX)
+}
diff --git a/crates/cargo-test-support/src/lib.rs b/crates/cargo-test-support/src/lib.rs
new file mode 100644
index 0000000..04d6ce9
--- /dev/null
+++ b/crates/cargo-test-support/src/lib.rs
@@ -0,0 +1,1424 @@
+//! # Cargo test support.
+//!
+//! See <https://rust-lang.github.io/cargo/contrib/> for a guide on writing tests.
+
+#![allow(clippy::all)]
+
+use std::env;
+use std::ffi::OsStr;
+use std::fmt::Write;
+use std::fs;
+use std::os;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Output};
+use std::str;
+use std::time::{self, Duration};
+
+use anyhow::{bail, Result};
+use cargo_util::{is_ci, ProcessBuilder, ProcessError};
+use serde_json;
+use url::Url;
+
+use self::paths::CargoPathExt;
+
/// Unwraps a `Result`, panicking via [`panic_error`] on `Err` with a
/// message that includes the stringified expression. The test-suite's
/// pervasive "this must succeed" helper.
#[macro_export]
macro_rules! t {
    ($e:expr) => {
        match $e {
            Ok(e) => e,
            Err(e) => $crate::panic_error(&format!("failed running {}", stringify!($e)), e),
        }
    };
}
+
/// Expands to the directory containing the file that invokes the macro
/// (via `file!()` and [`_curr_dir`]).
#[macro_export]
macro_rules! curr_dir {
    () => {
        // No trailing semicolon: this macro is used in expression position,
        // and a `;` in the transcriber trips the
        // `semicolon_in_expressions_from_macros` future-compatibility lint.
        $crate::_curr_dir(std::path::Path::new(file!()))
    };
}
+
/// Implementation detail of `curr_dir!`: maps a `file!()` path to its
/// parent directory, compensating for the rust-lang/rust workspace layout.
#[doc(hidden)]
pub fn _curr_dir(mut file_path: &'static Path) -> &'static Path {
    if !file_path.exists() {
        // HACK: Must be running in the rust-lang/rust workspace, adjust the paths accordingly.
        let prefix = PathBuf::from("src").join("tools").join("cargo");
        if let Ok(stripped) = file_path.strip_prefix(&prefix) {
            file_path = stripped;
        }
    }
    assert!(file_path.exists(), "{} does not exist", file_path.display());
    file_path.parent().unwrap()
}
+
/// Panics with `what` plus the error and its full cause chain, one
/// "Caused by:" section per cause.
///
/// The body lives in a non-generic inner fn so only the thin `Into`
/// conversion is monomorphized per error type; `#[track_caller]` on both
/// keeps the reported panic location at the original caller.
#[track_caller]
pub fn panic_error(what: &str, err: impl Into<anyhow::Error>) -> ! {
    let err = err.into();
    pe(what, err);
    #[track_caller]
    fn pe(what: &str, err: anyhow::Error) -> ! {
        let mut result = format!("{}\nerror: {}", what, err);
        for cause in err.chain().skip(1) {
            // `drop(...)` discards the fmt::Result without an unused warning.
            drop(writeln!(result, "\nCaused by:"));
            drop(write!(result, "{}", cause));
        }
        panic!("\n{}", result);
    }
}
+
+pub use cargo_test_macro::cargo_test;
+
+pub mod compare;
+pub mod containers;
+pub mod cross_compile;
+mod diff;
+pub mod git;
+pub mod install;
+pub mod paths;
+pub mod publish;
+pub mod registry;
+pub mod tools;
+
/// Extension traits that tests typically import in one shot
/// (`use cargo_test_support::prelude::*`).
pub mod prelude {
    pub use crate::ArgLine;
    pub use crate::CargoCommand;
    pub use crate::ChannelChanger;
    pub use crate::TestEnv;
}
+
+/*
+ *
+ * ===== Builders =====
+ *
+ */
+
/// A file to be written into a test project by `ProjectBuilder::build()`
/// or `Project::change_file()`.
#[derive(PartialEq, Clone)]
struct FileBuilder {
    // Absolute path the file will be created at.
    path: PathBuf,
    // Full file contents.
    body: String,
    // If true, `mk()` appends the platform EXE extension and (on unix)
    // sets the execute permission bits.
    executable: bool,
}
+
+impl FileBuilder {
+ pub fn new(path: PathBuf, body: &str, executable: bool) -> FileBuilder {
+ FileBuilder {
+ path,
+ body: body.to_string(),
+ executable: executable,
+ }
+ }
+
+ fn mk(&mut self) {
+ if self.executable {
+ self.path.set_extension(env::consts::EXE_EXTENSION);
+ }
+
+ self.dirname().mkdir_p();
+ fs::write(&self.path, &self.body)
+ .unwrap_or_else(|e| panic!("could not create file {}: {}", self.path.display(), e));
+
+ #[cfg(unix)]
+ if self.executable {
+ use std::os::unix::fs::PermissionsExt;
+
+ let mut perms = fs::metadata(&self.path).unwrap().permissions();
+ let mode = perms.mode();
+ perms.set_mode(mode | 0o111);
+ fs::set_permissions(&self.path, perms).unwrap();
+ }
+ }
+
+ fn dirname(&self) -> &Path {
+ self.path.parent().unwrap()
+ }
+}
+
/// A symlink to be created inside a test project.
///
/// Note the naming: `dst` is the path the link points AT, while `src` is
/// the link itself that gets created (see `mk()`, which calls
/// `symlink(&self.dst, &self.src)`).
#[derive(PartialEq, Clone)]
struct SymlinkBuilder {
    dst: PathBuf,
    src: PathBuf,
    // Windows must choose between `symlink_dir` and `symlink_file`.
    src_is_dir: bool,
}
+
impl SymlinkBuilder {
    /// A pending symlink to a file: link at `src`, pointing to `dst`.
    pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder {
        SymlinkBuilder {
            dst,
            src,
            src_is_dir: false,
        }
    }

    /// A pending symlink to a directory (Windows needs the distinction).
    pub fn new_dir(dst: PathBuf, src: PathBuf) -> SymlinkBuilder {
        SymlinkBuilder {
            dst,
            src,
            src_is_dir: true,
        }
    }

    /// Creates the symlink on disk (unix: one call handles files and dirs).
    #[cfg(unix)]
    fn mk(&self) {
        self.dirname().mkdir_p();
        t!(os::unix::fs::symlink(&self.dst, &self.src));
    }

    /// Creates the symlink on disk. Takes `&mut self` because a link to an
    /// executable gets the EXE extension appended to the link path.
    #[cfg(windows)]
    fn mk(&mut self) {
        self.dirname().mkdir_p();
        if self.src_is_dir {
            t!(os::windows::fs::symlink_dir(&self.dst, &self.src));
        } else {
            // Mirror the target's `.exe` extension on the link itself.
            if let Some(ext) = self.dst.extension() {
                if ext == env::consts::EXE_EXTENSION {
                    self.src.set_extension(ext);
                }
            }
            t!(os::windows::fs::symlink_file(&self.dst, &self.src));
        }
    }

    /// Parent directory of the link path (`src`).
    fn dirname(&self) -> &Path {
        self.src.parent().unwrap()
    }
}
+
/// A cargo project to run tests against.
///
/// See [`ProjectBuilder`] or [`Project::from_template`] to get started.
pub struct Project {
    // Absolute root directory of the project on disk.
    root: PathBuf,
}
+
/// Create a project to run tests against
///
/// The project can be constructed programmatically or from the filesystem with [`Project::from_template`]
#[must_use]
pub struct ProjectBuilder {
    // The project being assembled; its root decides where files land.
    root: Project,
    // Files to write when `build()` runs.
    files: Vec<FileBuilder>,
    // Symlinks to create when `build()` runs.
    symlinks: Vec<SymlinkBuilder>,
    // When true, `build()` does not synthesize a default `Cargo.toml`.
    no_manifest: bool,
}
+
+impl ProjectBuilder {
+ /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo`
+ pub fn root(&self) -> PathBuf {
+ self.root.root()
+ }
+
+ /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug`
+ pub fn target_debug_dir(&self) -> PathBuf {
+ self.root.target_debug_dir()
+ }
+
+ pub fn new(root: PathBuf) -> ProjectBuilder {
+ ProjectBuilder {
+ root: Project { root },
+ files: vec![],
+ symlinks: vec![],
+ no_manifest: false,
+ }
+ }
+
+ pub fn at<P: AsRef<Path>>(mut self, path: P) -> Self {
+ self.root = Project {
+ root: paths::root().join(path),
+ };
+ self
+ }
+
+ /// Adds a file to the project.
+ pub fn file<B: AsRef<Path>>(mut self, path: B, body: &str) -> Self {
+ self._file(path.as_ref(), body, false);
+ self
+ }
+
+ /// Adds an executable file to the project.
+ pub fn executable<B: AsRef<Path>>(mut self, path: B, body: &str) -> Self {
+ self._file(path.as_ref(), body, true);
+ self
+ }
+
+ fn _file(&mut self, path: &Path, body: &str, executable: bool) {
+ self.files.push(FileBuilder::new(
+ self.root.root().join(path),
+ body,
+ executable,
+ ));
+ }
+
+ /// Adds a symlink to a file to the project.
+ pub fn symlink<T: AsRef<Path>>(mut self, dst: T, src: T) -> Self {
+ self.symlinks.push(SymlinkBuilder::new(
+ self.root.root().join(dst),
+ self.root.root().join(src),
+ ));
+ self
+ }
+
+ /// Create a symlink to a directory
+ pub fn symlink_dir<T: AsRef<Path>>(mut self, dst: T, src: T) -> Self {
+ self.symlinks.push(SymlinkBuilder::new_dir(
+ self.root.root().join(dst),
+ self.root.root().join(src),
+ ));
+ self
+ }
+
+ pub fn no_manifest(mut self) -> Self {
+ self.no_manifest = true;
+ self
+ }
+
+ /// Creates the project.
+ pub fn build(mut self) -> Project {
+ // First, clean the directory if it already exists
+ self.rm_root();
+
+ // Create the empty directory
+ self.root.root().mkdir_p();
+
+ let manifest_path = self.root.root().join("Cargo.toml");
+ if !self.no_manifest && self.files.iter().all(|fb| fb.path != manifest_path) {
+ self._file(
+ Path::new("Cargo.toml"),
+ &basic_manifest("foo", "0.0.1"),
+ false,
+ )
+ }
+
+ let past = time::SystemTime::now() - Duration::new(1, 0);
+ let ftime = filetime::FileTime::from_system_time(past);
+
+ for file in self.files.iter_mut() {
+ file.mk();
+ if is_coarse_mtime() {
+ // Place the entire project 1 second in the past to ensure
+ // that if cargo is called multiple times, the 2nd call will
+ // see targets as "fresh". Without this, if cargo finishes in
+ // under 1 second, the second call will see the mtime of
+ // source == mtime of output and consider it dirty.
+ filetime::set_file_times(&file.path, ftime, ftime).unwrap();
+ }
+ }
+
+ for symlink in self.symlinks.iter_mut() {
+ symlink.mk();
+ }
+
+ let ProjectBuilder { root, .. } = self;
+ root
+ }
+
+ fn rm_root(&self) {
+ self.root.root().rm_rf()
+ }
+}
+
impl Project {
    /// Copy the test project from a fixed state
    ///
    /// The template is copied into `<test root>/case` via snapbox.
    pub fn from_template(template_path: impl AsRef<std::path::Path>) -> Self {
        let root = paths::root();
        let project_root = root.join("case");
        snapbox::path::copy_template(template_path.as_ref(), &project_root).unwrap();
        Self { root: project_root }
    }

    /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo`
    pub fn root(&self) -> PathBuf {
        self.root.clone()
    }

    /// Project's target dir, ex: `/path/to/cargo/target/cit/t0/foo/target`
    pub fn build_dir(&self) -> PathBuf {
        self.root().join("target")
    }

    /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug`
    pub fn target_debug_dir(&self) -> PathBuf {
        self.build_dir().join("debug")
    }

    /// File url for root, ex: `file:///path/to/cargo/target/cit/t0/foo`
    pub fn url(&self) -> Url {
        path2url(self.root())
    }

    /// Path to an example built as a library.
    /// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro"
    /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/examples/libex.rlib`
    pub fn example_lib(&self, name: &str, kind: &str) -> PathBuf {
        self.target_debug_dir()
            .join("examples")
            .join(paths::get_lib_filename(name, kind))
    }

    /// Path to a debug binary.
    /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/foo`
    pub fn bin(&self, b: &str) -> PathBuf {
        self.build_dir()
            .join("debug")
            .join(&format!("{}{}", b, env::consts::EXE_SUFFIX))
    }

    /// Path to a release binary.
    /// ex: `/path/to/cargo/target/cit/t0/foo/target/release/foo`
    pub fn release_bin(&self, b: &str) -> PathBuf {
        self.build_dir()
            .join("release")
            .join(&format!("{}{}", b, env::consts::EXE_SUFFIX))
    }

    /// Path to a debug binary for a specific target triple.
    /// ex: `/path/to/cargo/target/cit/t0/foo/target/i686-apple-darwin/debug/foo`
    pub fn target_bin(&self, target: &str, b: &str) -> PathBuf {
        self.build_dir().join(target).join("debug").join(&format!(
            "{}{}",
            b,
            env::consts::EXE_SUFFIX
        ))
    }

    /// Returns an iterator of paths matching the glob pattern, which is
    /// relative to the project root.
    pub fn glob<P: AsRef<Path>>(&self, pattern: P) -> glob::Paths {
        let pattern = self.root().join(pattern);
        glob::glob(pattern.to_str().expect("failed to convert pattern to str"))
            .expect("failed to glob")
    }

    /// Changes the contents of an existing file.
    pub fn change_file(&self, path: &str, body: &str) {
        FileBuilder::new(self.root().join(path), body, false).mk()
    }

    /// Creates a `ProcessBuilder` to run a program in the project
    /// and wrap it in an Execs to assert on the execution.
    /// Example:
    ///         p.process(&p.bin("foo"))
    ///             .with_stdout("bar\n")
    ///             .run();
    pub fn process<T: AsRef<OsStr>>(&self, program: T) -> Execs {
        let mut p = process(program);
        p.cwd(self.root());
        execs().with_process_builder(p)
    }

    /// Creates a `ProcessBuilder` to run cargo.
    /// Arguments can be separated by spaces.
    /// Example:
    ///     p.cargo("build --bin foo").run();
    pub fn cargo(&self, cmd: &str) -> Execs {
        let cargo = cargo_exe();
        let mut execs = self.process(&cargo);
        if let Some(ref mut p) = execs.process_builder {
            // Point `$CARGO` at the binary under test for subcommands.
            p.env("CARGO", cargo);
            p.arg_line(cmd);
        }
        execs
    }

    /// Safely run a process after `cargo build`.
    ///
    /// Windows has a problem where a process cannot be reliably
    /// be replaced, removed, or renamed immediately after executing it.
    /// The action may fail (with errors like Access is denied), or
    /// it may succeed, but future attempts to use the same filename
    /// will fail with "Already Exists".
    ///
    /// If you have a test that needs to do `cargo run` multiple
    /// times, you should instead use `cargo build` and use this
    /// method to run the executable. Each time you call this,
    /// use a new name for `dst`.
    /// See rust-lang/cargo#5481.
    pub fn rename_run(&self, src: &str, dst: &str) -> Execs {
        let src = self.bin(src);
        let dst = self.bin(dst);
        fs::rename(&src, &dst)
            .unwrap_or_else(|e| panic!("Failed to rename `{:?}` to `{:?}`: {}", src, dst, e));
        self.process(dst)
    }

    /// Returns the contents of `Cargo.lock`.
    pub fn read_lockfile(&self) -> String {
        self.read_file("Cargo.lock")
    }

    /// Returns the contents of a path in the project root
    pub fn read_file(&self, path: &str) -> String {
        let full = self.root().join(path);
        fs::read_to_string(&full)
            .unwrap_or_else(|e| panic!("could not read file {}: {}", full.display(), e))
    }

    /// Modifies `Cargo.toml` to remove all commented lines.
    ///
    /// Note: removes every `#` character, so it also un-comments inline
    /// comments within otherwise-active lines.
    pub fn uncomment_root_manifest(&self) {
        let contents = self.read_file("Cargo.toml").replace("#", "");
        fs::write(self.root().join("Cargo.toml"), contents).unwrap();
    }

    /// Creates a symlink at `dst` pointing to `src`, both relative to the
    /// project root. Panics on failure.
    pub fn symlink(&self, src: impl AsRef<Path>, dst: impl AsRef<Path>) {
        let src = self.root().join(src.as_ref());
        let dst = self.root().join(dst.as_ref());
        #[cfg(unix)]
        {
            if let Err(e) = os::unix::fs::symlink(&src, &dst) {
                panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
            }
        }
        #[cfg(windows)]
        {
            if src.is_dir() {
                if let Err(e) = os::windows::fs::symlink_dir(&src, &dst) {
                    panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
                }
            } else {
                if let Err(e) = os::windows::fs::symlink_file(&src, &dst) {
                    panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
                }
            }
        }
    }
}
+
+// Generates a project layout
+pub fn project() -> ProjectBuilder {
+ ProjectBuilder::new(paths::root().join("foo"))
+}
+
+// Generates a project layout in given directory
+pub fn project_in(dir: &str) -> ProjectBuilder {
+ ProjectBuilder::new(paths::root().join(dir).join("foo"))
+}
+
+// Generates a project layout inside our fake home dir
+pub fn project_in_home(name: &str) -> ProjectBuilder {
+ ProjectBuilder::new(paths::home().join(name))
+}
+
+// === Helpers ===
+
/// Builds a `main.rs` body: one `extern crate` line per dependency,
/// followed by a `main` that prints `println` (passed verbatim as the
/// `println!` argument, so include your own quotes).
pub fn main_file(println: &str, deps: &[&str]) -> String {
    let externs: String = deps
        .iter()
        .map(|dep| format!("extern crate {};\n", dep))
        .collect();
    format!("{}fn main() {{ println!({}); }}\n", externs, println)
}
+
/// Path to the `cargo` binary under test, located by snapbox in the
/// build's target directory.
pub fn cargo_exe() -> PathBuf {
    snapbox::cmd::cargo_bin("cargo")
}
+
/// This is the raw output from the process.
///
/// This is similar to `std::process::Output`, however the `status` is
/// translated to the raw `code`. This is necessary because `ProcessError`
/// does not have access to the raw `ExitStatus` because `ProcessError` needs
/// to be serializable (for the Rustc cache), and `ExitStatus` does not
/// provide a constructor.
pub struct RawOutput {
    // Exit code; `None` if terminated by a signal.
    pub code: Option<i32>,
    pub stdout: Vec<u8>,
    pub stderr: Vec<u8>,
}
+
/// Collects expectations about a process execution and checks them when
/// the process is run (see `run()` and friends). Panics in `Drop` if it
/// was never run.
#[must_use]
#[derive(Clone)]
pub struct Execs {
    // Set once the command has executed; checked by `Drop`.
    ran: bool,
    // The command to run; `None` until attached via `with_process_builder`.
    process_builder: Option<ProcessBuilder>,
    // Exact expected stdout (pattern syntax: see `compare`).
    expect_stdout: Option<String>,
    // Data to WRITE to the child's stdin (despite the `expect_` name).
    expect_stdin: Option<String>,
    // Exact expected stderr.
    expect_stderr: Option<String>,
    // Expected exit code; `None` disables the check (default is `Some(0)`).
    expect_exit_code: Option<i32>,
    // Substring/line expectations, each checked independently.
    expect_stdout_contains: Vec<String>,
    expect_stderr_contains: Vec<String>,
    expect_stdout_contains_n: Vec<(String, usize)>,
    expect_stdout_not_contains: Vec<String>,
    expect_stderr_not_contains: Vec<String>,
    expect_stderr_unordered: Vec<String>,
    // Pairs of (must-contain, must-not-contain) substrings for a single line.
    expect_stderr_with_without: Vec<(Vec<String>, Vec<String>)>,
    // JSON expectations against stdout.
    expect_json: Option<String>,
    expect_json_contains_unordered: Option<String>,
    // Forward child stdout/stderr to the terminal while running.
    stream_output: bool,
}
+
+impl Execs {
+ pub fn with_process_builder(mut self, p: ProcessBuilder) -> Execs {
+ self.process_builder = Some(p);
+ self
+ }
+
+ /// Verifies that stdout is equal to the given lines.
+ /// See [`compare`] for supported patterns.
+ pub fn with_stdout<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stdout = Some(expected.to_string());
+ self
+ }
+
+ /// Verifies that stderr is equal to the given lines.
+ /// See [`compare`] for supported patterns.
+ pub fn with_stderr<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stderr = Some(expected.to_string());
+ self
+ }
+
+ /// Writes the given lines to stdin.
+ pub fn with_stdin<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stdin = Some(expected.to_string());
+ self
+ }
+
+ /// Verifies the exit code from the process.
+ ///
+ /// This is not necessary if the expected exit code is `0`.
+ pub fn with_status(&mut self, expected: i32) -> &mut Self {
+ self.expect_exit_code = Some(expected);
+ self
+ }
+
+ /// Removes exit code check for the process.
+ ///
+ /// By default, the expected exit code is `0`.
+ pub fn without_status(&mut self) -> &mut Self {
+ self.expect_exit_code = None;
+ self
+ }
+
+ /// Verifies that stdout contains the given contiguous lines somewhere in
+ /// its output.
+ ///
+ /// See [`compare`] for supported patterns.
+ pub fn with_stdout_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stdout_contains.push(expected.to_string());
+ self
+ }
+
+ /// Verifies that stderr contains the given contiguous lines somewhere in
+ /// its output.
+ ///
+ /// See [`compare`] for supported patterns.
+ pub fn with_stderr_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stderr_contains.push(expected.to_string());
+ self
+ }
+
+ /// Verifies that stdout contains the given contiguous lines somewhere in
+ /// its output, and should be repeated `number` times.
+ ///
+ /// See [`compare`] for supported patterns.
+ pub fn with_stdout_contains_n<S: ToString>(&mut self, expected: S, number: usize) -> &mut Self {
+ self.expect_stdout_contains_n
+ .push((expected.to_string(), number));
+ self
+ }
+
+ /// Verifies that stdout does not contain the given contiguous lines.
+ ///
+ /// See [`compare`] for supported patterns.
+ ///
+ /// See note on [`Self::with_stderr_does_not_contain`].
+ pub fn with_stdout_does_not_contain<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stdout_not_contains.push(expected.to_string());
+ self
+ }
+
+ /// Verifies that stderr does not contain the given contiguous lines.
+ ///
+ /// See [`compare`] for supported patterns.
+ ///
+ /// Care should be taken when using this method because there is a
+ /// limitless number of possible things that *won't* appear. A typo means
+ /// your test will pass without verifying the correct behavior. If
+ /// possible, write the test first so that it fails, and then implement
+ /// your fix/feature to make it pass.
+ pub fn with_stderr_does_not_contain<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stderr_not_contains.push(expected.to_string());
+ self
+ }
+
+ /// Verifies that all of the stderr output is equal to the given lines,
+ /// ignoring the order of the lines.
+ ///
+ /// See [`compare`] for supported patterns.
+ ///
+ /// This is useful when checking the output of `cargo build -v` since
+ /// the order of the output is not always deterministic.
+ /// Recommend use `with_stderr_contains` instead unless you really want to
+ /// check *every* line of output.
+ ///
+ /// Be careful when using patterns such as `[..]`, because you may end up
+ /// with multiple lines that might match, and this is not smart enough to
+ /// do anything like longest-match. For example, avoid something like:
+ ///
+ /// ```text
+ /// [RUNNING] `rustc [..]
+ /// [RUNNING] `rustc --crate-name foo [..]
+ /// ```
+ ///
+ /// This will randomly fail if the other crate name is `bar`, and the
+ /// order changes.
+ pub fn with_stderr_unordered<S: ToString>(&mut self, expected: S) -> &mut Self {
+ self.expect_stderr_unordered.push(expected.to_string());
+ self
+ }
+
+ /// Verify that a particular line appears in stderr with and without the
+ /// given substrings. Exactly one line must match.
+ ///
+ /// The substrings are matched as `contains`. Example:
+ ///
+ /// ```no_run
+ /// execs.with_stderr_line_without(
+ /// &[
+ /// "[RUNNING] `rustc --crate-name build_script_build",
+ /// "-C opt-level=3",
+ /// ],
+ /// &["-C debuginfo", "-C incremental"],
+ /// )
+ /// ```
+ ///
+ /// This will check that a build line includes `-C opt-level=3` but does
+ /// not contain `-C debuginfo` or `-C incremental`.
+ ///
+ /// Be careful writing the `without` fragments, see note in
+ /// `with_stderr_does_not_contain`.
+ pub fn with_stderr_line_without<S: ToString>(
+ &mut self,
+ with: &[S],
+ without: &[S],
+ ) -> &mut Self {
+ let with = with.iter().map(|s| s.to_string()).collect();
+ let without = without.iter().map(|s| s.to_string()).collect();
+ self.expect_stderr_with_without.push((with, without));
+ self
+ }
+
+ /// Verifies the JSON output matches the given JSON.
+ ///
+ /// This is typically used when testing cargo commands that emit JSON.
+ /// Each separate JSON object should be separated by a blank line.
+ /// Example:
+ ///
+ /// ```rust,ignore
+ /// assert_that(
+ /// p.cargo("metadata"),
+ /// execs().with_json(r#"
+ /// {"example": "abc"}
+ ///
+ /// {"example": "def"}
+ /// "#)
+ /// );
+ /// ```
+ ///
+ /// - Objects should match in the order given.
+ /// - The order of arrays is ignored.
+ /// - Strings support patterns described in [`compare`].
+ /// - Use `"{...}"` to match any object.
+ pub fn with_json(&mut self, expected: &str) -> &mut Self {
+ self.expect_json = Some(expected.to_string());
+ self
+ }
+
+ /// Verifies JSON output contains the given objects (in any order) somewhere
+ /// in its output.
+ ///
+ /// CAUTION: Be very careful when using this. Make sure every object is
+ /// unique (not a subset of one another). Also avoid using objects that
+ /// could possibly match multiple output lines unless you're very sure of
+ /// what you are doing.
+ ///
+ /// See `with_json` for more detail.
+ pub fn with_json_contains_unordered(&mut self, expected: &str) -> &mut Self {
+ match &mut self.expect_json_contains_unordered {
+ None => self.expect_json_contains_unordered = Some(expected.to_string()),
+ Some(e) => {
+ e.push_str("\n\n");
+ e.push_str(expected);
+ }
+ }
+ self
+ }
+
+ /// Forward subordinate process stdout/stderr to the terminal.
+ /// Useful for printf debugging of the tests.
+ /// CAUTION: CI will fail if you leave this in your test!
+ #[allow(unused)]
+ pub fn stream(&mut self) -> &mut Self {
+ self.stream_output = true;
+ self
+ }
+
+ pub fn arg<T: AsRef<OsStr>>(&mut self, arg: T) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.arg(arg);
+ }
+ self
+ }
+
+ pub fn cwd<T: AsRef<OsStr>>(&mut self, path: T) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ if let Some(cwd) = p.get_cwd() {
+ let new_path = cwd.join(path.as_ref());
+ p.cwd(new_path);
+ } else {
+ p.cwd(path);
+ }
+ }
+ self
+ }
+
+ fn get_cwd(&self) -> Option<&Path> {
+ self.process_builder.as_ref().and_then(|p| p.get_cwd())
+ }
+
+ pub fn env<T: AsRef<OsStr>>(&mut self, key: &str, val: T) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.env(key, val);
+ }
+ self
+ }
+
+ pub fn env_remove(&mut self, key: &str) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.env_remove(key);
+ }
+ self
+ }
+
+ pub fn exec_with_output(&mut self) -> Result<Output> {
+ self.ran = true;
+ // TODO avoid unwrap
+ let p = (&self.process_builder).clone().unwrap();
+ p.exec_with_output()
+ }
+
+ pub fn build_command(&mut self) -> Command {
+ self.ran = true;
+ // TODO avoid unwrap
+ let p = (&self.process_builder).clone().unwrap();
+ p.build_command()
+ }
+
+ /// Enables nightly features for testing
+ ///
+ /// The list of reasons should be why nightly cargo is needed. If it is
+ /// becuase of an unstable feature put the name of the feature as the reason,
+ /// e.g. `&["print-im-a-teapot"]`
+ pub fn masquerade_as_nightly_cargo(&mut self, reasons: &[&str]) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.masquerade_as_nightly_cargo(reasons);
+ }
+ self
+ }
+
+ /// Overrides the crates.io URL for testing.
+ ///
+ /// Can be used for testing crates-io functionality where alt registries
+ /// cannot be used.
+ pub fn replace_crates_io(&mut self, url: &Url) -> &mut Self {
+ if let Some(ref mut p) = self.process_builder {
+ p.env("__CARGO_TEST_CRATES_IO_URL_DO_NOT_USE_THIS", url.as_str());
+ }
+ self
+ }
+
+ pub fn enable_split_debuginfo_packed(&mut self) -> &mut Self {
+ self.env("CARGO_PROFILE_DEV_SPLIT_DEBUGINFO", "packed")
+ .env("CARGO_PROFILE_TEST_SPLIT_DEBUGINFO", "packed")
+ .env("CARGO_PROFILE_RELEASE_SPLIT_DEBUGINFO", "packed")
+ .env("CARGO_PROFILE_BENCH_SPLIT_DEBUGINFO", "packed");
+ self
+ }
+
+ pub fn enable_mac_dsym(&mut self) -> &mut Self {
+ if cfg!(target_os = "macos") {
+ return self.enable_split_debuginfo_packed();
+ }
+ self
+ }
+
+ #[track_caller]
+ pub fn run(&mut self) {
+ self.ran = true;
+ let mut p = (&self.process_builder).clone().unwrap();
+ if let Some(stdin) = self.expect_stdin.take() {
+ p.stdin(stdin);
+ }
+ if let Err(e) = self.match_process(&p) {
+ panic_error(&format!("test failed running {}", p), e);
+ }
+ }
+
+ #[track_caller]
+ pub fn run_expect_error(&mut self) {
+ self.ran = true;
+ let p = (&self.process_builder).clone().unwrap();
+ if self.match_process(&p).is_ok() {
+ panic!("test was expected to fail, but succeeded running {}", p);
+ }
+ }
+
+ /// Runs the process, checks the expected output, and returns the first
+ /// JSON object on stdout.
+ #[track_caller]
+ pub fn run_json(&mut self) -> serde_json::Value {
+ self.ran = true;
+ let p = (&self.process_builder).clone().unwrap();
+ match self.match_process(&p) {
+ Err(e) => panic_error(&format!("test failed running {}", p), e),
+ Ok(output) => serde_json::from_slice(&output.stdout).unwrap_or_else(|e| {
+ panic!(
+ "\nfailed to parse JSON: {}\n\
+ output was:\n{}\n",
+ e,
+ String::from_utf8_lossy(&output.stdout)
+ );
+ }),
+ }
+ }
+
+ #[track_caller]
+ pub fn run_output(&mut self, output: &Output) {
+ self.ran = true;
+ if let Err(e) = self.match_output(output.status.code(), &output.stdout, &output.stderr) {
+ panic_error("process did not return the expected result", e)
+ }
+ }
+
+ fn verify_checks_output(&self, stdout: &[u8], stderr: &[u8]) {
+ if self.expect_exit_code.unwrap_or(0) != 0
+ && self.expect_stdout.is_none()
+ && self.expect_stdin.is_none()
+ && self.expect_stderr.is_none()
+ && self.expect_stdout_contains.is_empty()
+ && self.expect_stderr_contains.is_empty()
+ && self.expect_stdout_contains_n.is_empty()
+ && self.expect_stdout_not_contains.is_empty()
+ && self.expect_stderr_not_contains.is_empty()
+ && self.expect_stderr_unordered.is_empty()
+ && self.expect_stderr_with_without.is_empty()
+ && self.expect_json.is_none()
+ && self.expect_json_contains_unordered.is_none()
+ {
+ panic!(
+ "`with_status()` is used, but no output is checked.\n\
+ The test must check the output to ensure the correct error is triggered.\n\
+ --- stdout\n{}\n--- stderr\n{}",
+ String::from_utf8_lossy(stdout),
+ String::from_utf8_lossy(stderr),
+ );
+ }
+ }
+
+ fn match_process(&self, process: &ProcessBuilder) -> Result<RawOutput> {
+ println!("running {}", process);
+ let res = if self.stream_output {
+ if is_ci() {
+ panic!("`.stream()` is for local debugging")
+ }
+ process.exec_with_streaming(
+ &mut |out| {
+ println!("{}", out);
+ Ok(())
+ },
+ &mut |err| {
+ eprintln!("{}", err);
+ Ok(())
+ },
+ true,
+ )
+ } else {
+ process.exec_with_output()
+ };
+
+ match res {
+ Ok(out) => {
+ self.match_output(out.status.code(), &out.stdout, &out.stderr)?;
+ return Ok(RawOutput {
+ stdout: out.stdout,
+ stderr: out.stderr,
+ code: out.status.code(),
+ });
+ }
+ Err(e) => {
+ if let Some(ProcessError {
+ stdout: Some(stdout),
+ stderr: Some(stderr),
+ code,
+ ..
+ }) = e.downcast_ref::<ProcessError>()
+ {
+ self.match_output(*code, stdout, stderr)?;
+ return Ok(RawOutput {
+ stdout: stdout.to_vec(),
+ stderr: stderr.to_vec(),
+ code: *code,
+ });
+ }
+ bail!("could not exec process {}: {:?}", process, e)
+ }
+ }
+ }
+
+ fn match_output(&self, code: Option<i32>, stdout: &[u8], stderr: &[u8]) -> Result<()> {
+ self.verify_checks_output(stdout, stderr);
+ let stdout = str::from_utf8(stdout).expect("stdout is not utf8");
+ let stderr = str::from_utf8(stderr).expect("stderr is not utf8");
+ let cwd = self.get_cwd();
+
+ match self.expect_exit_code {
+ None => {}
+ Some(expected) if code == Some(expected) => {}
+ Some(expected) => bail!(
+ "process exited with code {} (expected {})\n--- stdout\n{}\n--- stderr\n{}",
+ code.unwrap_or(-1),
+ expected,
+ stdout,
+ stderr
+ ),
+ }
+
+ if let Some(expect_stdout) = &self.expect_stdout {
+ compare::match_exact(expect_stdout, stdout, "stdout", stderr, cwd)?;
+ }
+ if let Some(expect_stderr) = &self.expect_stderr {
+ compare::match_exact(expect_stderr, stderr, "stderr", stdout, cwd)?;
+ }
+ for expect in self.expect_stdout_contains.iter() {
+ compare::match_contains(expect, stdout, cwd)?;
+ }
+ for expect in self.expect_stderr_contains.iter() {
+ compare::match_contains(expect, stderr, cwd)?;
+ }
+ for &(ref expect, number) in self.expect_stdout_contains_n.iter() {
+ compare::match_contains_n(expect, number, stdout, cwd)?;
+ }
+ for expect in self.expect_stdout_not_contains.iter() {
+ compare::match_does_not_contain(expect, stdout, cwd)?;
+ }
+ for expect in self.expect_stderr_not_contains.iter() {
+ compare::match_does_not_contain(expect, stderr, cwd)?;
+ }
+ for expect in self.expect_stderr_unordered.iter() {
+ compare::match_unordered(expect, stderr, cwd)?;
+ }
+ for (with, without) in self.expect_stderr_with_without.iter() {
+ compare::match_with_without(stderr, with, without, cwd)?;
+ }
+
+ if let Some(ref expect_json) = self.expect_json {
+ compare::match_json(expect_json, stdout, cwd)?;
+ }
+
+ if let Some(ref expected) = self.expect_json_contains_unordered {
+ compare::match_json_contains_unordered(expected, stdout, cwd)?;
+ }
+ Ok(())
+ }
+}
+
+impl Drop for Execs {
+ fn drop(&mut self) {
+ if !self.ran && !std::thread::panicking() {
+ panic!("forgot to run this command");
+ }
+ }
+}
+
/// Creates a fresh [`Execs`] with no process attached.
///
/// Defaults: exit code must be `0`, no output expectations, not yet run.
pub fn execs() -> Execs {
    Execs {
        ran: false,
        process_builder: None,
        expect_stdout: None,
        expect_stderr: None,
        expect_stdin: None,
        expect_exit_code: Some(0),
        expect_stdout_contains: Vec::new(),
        expect_stderr_contains: Vec::new(),
        expect_stdout_contains_n: Vec::new(),
        expect_stdout_not_contains: Vec::new(),
        expect_stderr_not_contains: Vec::new(),
        expect_stderr_unordered: Vec::new(),
        expect_stderr_with_without: Vec::new(),
        expect_json: None,
        expect_json_contains_unordered: None,
        stream_output: false,
    }
}
+
/// A minimal `Cargo.toml` with the given package name and version.
pub fn basic_manifest(name: &str, version: &str) -> String {
    format!(
        r#"
        [package]
        name = "{}"
        version = "{}"
        authors = []
        "#,
        name, version
    )
}
+
/// A `Cargo.toml` for a package with a single `[[bin]]` target named `name`.
pub fn basic_bin_manifest(name: &str) -> String {
    format!(
        r#"
        [package]

        name = "{}"
        version = "0.5.0"
        authors = ["wycats@example.com"]

        [[bin]]

        name = "{}"
        "#,
        name, name
    )
}
+
/// A `Cargo.toml` for a package with a single `[lib]` target named `name`.
pub fn basic_lib_manifest(name: &str) -> String {
    format!(
        r#"
        [package]

        name = "{}"
        version = "0.5.0"
        authors = ["wycats@example.com"]

        [lib]

        name = "{}"
        "#,
        name, name
    )
}
+
+pub fn path2url<P: AsRef<Path>>(p: P) -> Url {
+ Url::from_file_path(p).ok().unwrap()
+}
+
/// Cached output of `rustc -vV`.
struct RustcInfo {
    // Full verbose-version text.
    verbose_version: String,
    // Target triple taken from the `host:` line.
    host: String,
}
+
+impl RustcInfo {
+ fn new() -> RustcInfo {
+ let output = ProcessBuilder::new("rustc")
+ .arg("-vV")
+ .exec_with_output()
+ .expect("rustc should exec");
+ let verbose_version = String::from_utf8(output.stdout).expect("utf8 output");
+ let host = verbose_version
+ .lines()
+ .filter_map(|line| line.strip_prefix("host: "))
+ .next()
+ .expect("verbose version has host: field")
+ .to_string();
+ RustcInfo {
+ verbose_version,
+ host,
+ }
+ }
+}
+
lazy_static::lazy_static! {
    /// Computed once per test process; `rustc -vV` is not re-run afterwards.
    static ref RUSTC_INFO: RustcInfo = RustcInfo::new();
}
+
/// The rustc host such as `x86_64-unknown-linux-gnu`.
pub fn rustc_host() -> &'static str {
    &RUSTC_INFO.host
}
+
/// The host triple suitable for use in a cargo environment variable (uppercased).
/// e.g. `x86_64-unknown-linux-gnu` becomes `X86_64_UNKNOWN_LINUX_GNU`.
pub fn rustc_host_env() -> String {
    rustc_host().to_uppercase().replace('-', "_")
}
+
/// Whether the `rustc` under test is a nightly (or dev) toolchain,
/// honoring the CI override below.
pub fn is_nightly() -> bool {
    let vv = &RUSTC_INFO.verbose_version;
    // CARGO_TEST_DISABLE_NIGHTLY is set in rust-lang/rust's CI so that all
    // nightly-only tests are disabled there. Otherwise, it could make it
    // difficult to land changes which would need to be made simultaneously in
    // rust-lang/cargo and rust-lang/rust, which isn't possible.
    env::var("CARGO_TEST_DISABLE_NIGHTLY").is_err()
        && (vv.contains("-nightly") || vv.contains("-dev"))
}
+
/// Creates a `ProcessBuilder` for program `t` with the standard test
/// environment applied (see `_process`).
pub fn process<T: AsRef<OsStr>>(t: T) -> ProcessBuilder {
    _process(t.as_ref())
}
+
// Non-generic worker for `process` (the usual trick to avoid duplicating
// the body for every `AsRef<OsStr>` type).
fn _process(t: &OsStr) -> ProcessBuilder {
    let mut p = ProcessBuilder::new(t);
    // Run from the test root with the scrubbed environment (see `TestEnv`).
    p.cwd(&paths::root()).test_env();
    p
}
+
/// Enable nightly features for testing
pub trait ChannelChanger {
    /// The list of reasons should be why nightly cargo is needed. If it is
    /// because of an unstable feature put the name of the feature as the reason,
    /// e.g. `&["print-im-a-teapot"]`.
    fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self;
}
+
+impl ChannelChanger for &mut ProcessBuilder {
+ fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self {
+ self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly")
+ }
+}
+
+impl ChannelChanger for snapbox::cmd::Command {
+ fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self {
+ self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly")
+ }
+}
+
+/// Establish a process's test environment
+pub trait TestEnv: Sized {
+ fn test_env(mut self) -> Self {
+ // In general just clear out all cargo-specific configuration already in the
+ // environment. Our tests all assume a "default configuration" unless
+ // specified otherwise.
+ for (k, _v) in env::vars() {
+ if k.starts_with("CARGO_") {
+ self = self.env_remove(&k);
+ }
+ }
+ if env::var_os("RUSTUP_TOOLCHAIN").is_some() {
+ // Override the PATH to avoid executing the rustup wrapper thousands
+ // of times. This makes the testsuite run substantially faster.
+ lazy_static::lazy_static! {
+ static ref RUSTC_DIR: PathBuf = {
+ match ProcessBuilder::new("rustup")
+ .args(&["which", "rustc"])
+ .exec_with_output()
+ {
+ Ok(output) => {
+ let s = str::from_utf8(&output.stdout).expect("utf8").trim();
+ let mut p = PathBuf::from(s);
+ p.pop();
+ p
+ }
+ Err(e) => {
+ panic!("RUSTUP_TOOLCHAIN was set, but could not run rustup: {}", e);
+ }
+ }
+ };
+ }
+ let path = env::var_os("PATH").unwrap_or_default();
+ let paths = env::split_paths(&path);
+ let new_path =
+ env::join_paths(std::iter::once(RUSTC_DIR.clone()).chain(paths)).unwrap();
+ self = self.env("PATH", new_path);
+ }
+
+ self = self
+ .current_dir(&paths::root())
+ .env("HOME", paths::home())
+ .env("CARGO_HOME", paths::home().join(".cargo"))
+ .env("__CARGO_TEST_ROOT", paths::global_root())
+ // Force Cargo to think it's on the stable channel for all tests, this
+ // should hopefully not surprise us as we add cargo features over time and
+ // cargo rides the trains.
+ .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "stable")
+ // Keeps cargo within its sandbox.
+ .env("__CARGO_TEST_DISABLE_GLOBAL_KNOWN_HOST", "1")
+ // Incremental generates a huge amount of data per test, which we
+ // don't particularly need. Tests that specifically need to check
+ // the incremental behavior should turn this back on.
+ .env("CARGO_INCREMENTAL", "0")
+ // Don't read the system git config which is out of our control.
+ .env("GIT_CONFIG_NOSYSTEM", "1")
+ .env_remove("__CARGO_DEFAULT_LIB_METADATA")
+ .env_remove("ALL_PROXY")
+ .env_remove("EMAIL")
+ .env_remove("GIT_AUTHOR_EMAIL")
+ .env_remove("GIT_AUTHOR_NAME")
+ .env_remove("GIT_COMMITTER_EMAIL")
+ .env_remove("GIT_COMMITTER_NAME")
+ .env_remove("http_proxy")
+ .env_remove("HTTPS_PROXY")
+ .env_remove("https_proxy")
+ .env_remove("MAKEFLAGS")
+ .env_remove("MFLAGS")
+ .env_remove("MSYSTEM") // assume cmd.exe everywhere on windows
+ .env_remove("RUSTC")
+ .env_remove("RUSTC_WORKSPACE_WRAPPER")
+ .env_remove("RUSTC_WRAPPER")
+ .env_remove("RUSTDOC")
+ .env_remove("RUSTDOCFLAGS")
+ .env_remove("RUSTFLAGS")
+ .env_remove("SSH_AUTH_SOCK") // ensure an outer agent is never contacted
+ .env_remove("USER") // not set on some rust-lang docker images
+ .env_remove("XDG_CONFIG_HOME"); // see #2345
+ if cfg!(target_os = "macos") {
+ // Work-around a bug in macOS 10.15, see `link_or_copy` for details.
+ self = self.env("__CARGO_COPY_DONT_LINK_DO_NOT_USE_THIS", "1");
+ }
+ if cfg!(windows) {
+ self = self.env("USERPROFILE", paths::home());
+ }
+ self
+ }
+
+ fn current_dir<S: AsRef<std::path::Path>>(self, path: S) -> Self;
+ fn env<S: AsRef<std::ffi::OsStr>>(self, key: &str, value: S) -> Self;
+ fn env_remove(self, key: &str) -> Self;
+}
+
+impl TestEnv for &mut ProcessBuilder {
+ fn current_dir<S: AsRef<std::path::Path>>(self, path: S) -> Self {
+ let path = path.as_ref();
+ self.cwd(path)
+ }
+ fn env<S: AsRef<std::ffi::OsStr>>(self, key: &str, value: S) -> Self {
+ self.env(key, value)
+ }
+ fn env_remove(self, key: &str) -> Self {
+ self.env_remove(key)
+ }
+}
+
+impl TestEnv for snapbox::cmd::Command {
+ fn current_dir<S: AsRef<std::path::Path>>(self, path: S) -> Self {
+ self.current_dir(path)
+ }
+ fn env<S: AsRef<std::ffi::OsStr>>(self, key: &str, value: S) -> Self {
+ self.env(key, value)
+ }
+ fn env_remove(self, key: &str) -> Self {
+ self.env_remove(key)
+ }
+}
+
+/// Test the cargo command
+pub trait CargoCommand {
+ fn cargo_ui() -> Self;
+}
+
+impl CargoCommand for snapbox::cmd::Command {
+ fn cargo_ui() -> Self {
+ Self::new(cargo_exe())
+ .with_assert(compare::assert_ui())
+ .test_env()
+ }
+}
+
+/// Add a list of arguments as a line
+pub trait ArgLine: Sized {
+ fn arg_line(mut self, s: &str) -> Self {
+ for mut arg in s.split_whitespace() {
+ if (arg.starts_with('"') && arg.ends_with('"'))
+ || (arg.starts_with('\'') && arg.ends_with('\''))
+ {
+ arg = &arg[1..(arg.len() - 1).max(1)];
+ } else if arg.contains(&['"', '\''][..]) {
+ panic!("shell-style argument parsing is not supported")
+ }
+ self = self.arg(arg);
+ }
+ self
+ }
+
+ fn arg<S: AsRef<std::ffi::OsStr>>(self, s: S) -> Self;
+}
+
+impl ArgLine for &mut ProcessBuilder {
+ fn arg<S: AsRef<std::ffi::OsStr>>(self, s: S) -> Self {
+ self.arg(s)
+ }
+}
+
+impl ArgLine for snapbox::cmd::Command {
+ fn arg<S: AsRef<std::ffi::OsStr>>(self, s: S) -> Self {
+ self.arg(s)
+ }
+}
+
+pub fn cargo_process(s: &str) -> Execs {
+ let cargo = cargo_exe();
+ let mut p = process(&cargo);
+ p.env("CARGO", cargo);
+ p.arg_line(s);
+ execs().with_process_builder(p)
+}
+
+pub fn git_process(s: &str) -> ProcessBuilder {
+ let mut p = process("git");
+ p.arg_line(s);
+ p
+}
+
+pub fn sleep_ms(ms: u64) {
+ ::std::thread::sleep(Duration::from_millis(ms));
+}
+
+/// Returns `true` if the local filesystem has low-resolution mtimes.
+pub fn is_coarse_mtime() -> bool {
+ // If the filetime crate is being used to emulate HFS then
+ // return `true`, without looking at the actual hardware.
+ cfg!(emulate_second_only_system) ||
+ // This should actually be a test that `$CARGO_TARGET_DIR` is on an HFS
+ // filesystem, (or any filesystem with low-resolution mtimes). However,
+ // that's tricky to detect, so for now just deal with CI.
+ cfg!(target_os = "macos") && is_ci()
+}
+
+/// Some CI setups are much slower than the equipment used by Cargo itself.
+/// Architectures that do not have a modern processor, hardware emulation, etc.
+/// This provides a way for those setups to increase the cutoff for all the time-based tests.
+pub fn slow_cpu_multiplier(main: u64) -> Duration {
+ lazy_static::lazy_static! {
+ static ref SLOW_CPU_MULTIPLIER: u64 =
+ env::var("CARGO_TEST_SLOW_CPU_MULTIPLIER").ok().and_then(|m| m.parse().ok()).unwrap_or(1);
+ }
+ Duration::from_secs(*SLOW_CPU_MULTIPLIER * main)
+}
+
+#[cfg(windows)]
+pub fn symlink_supported() -> bool {
+ if is_ci() {
+ // We want to be absolutely sure this runs on CI.
+ return true;
+ }
+ let src = paths::root().join("symlink_src");
+ fs::write(&src, "").unwrap();
+ let dst = paths::root().join("symlink_dst");
+ let result = match os::windows::fs::symlink_file(&src, &dst) {
+ Ok(_) => {
+ fs::remove_file(&dst).unwrap();
+ true
+ }
+ Err(e) => {
+ eprintln!(
+ "symlinks not supported: {:?}\n\
+ Windows 10 users should enable developer mode.",
+ e
+ );
+ false
+ }
+ };
+ fs::remove_file(&src).unwrap();
+ return result;
+}
+
+#[cfg(not(windows))]
+pub fn symlink_supported() -> bool {
+ true
+}
+
+/// The error message for ENOENT.
+pub fn no_such_file_err_msg() -> String {
+ std::io::Error::from_raw_os_error(2).to_string()
+}
diff --git a/crates/cargo-test-support/src/paths.rs b/crates/cargo-test-support/src/paths.rs
new file mode 100644
index 0000000..ef1fddb
--- /dev/null
+++ b/crates/cargo-test-support/src/paths.rs
@@ -0,0 +1,347 @@
+use filetime::{self, FileTime};
+use lazy_static::lazy_static;
+use std::cell::RefCell;
+use std::collections::HashMap;
+use std::env;
+use std::fs;
+use std::io::{self, ErrorKind};
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Mutex;
+
+static CARGO_INTEGRATION_TEST_DIR: &str = "cit";
+
+lazy_static! {
+ // TODO: Use `SyncOnceCell` when stable
+ static ref GLOBAL_ROOT: Mutex<Option<PathBuf>> = Mutex::new(None);
+
+ static ref TEST_ROOTS: Mutex<HashMap<String, PathBuf>> = Default::default();
+}
+
+/// This is used when the cargo under test predates `CARGO_TARGET_TMPDIR`.
+/// TODO: Remove when CARGO_TARGET_TMPDIR grows old enough.
+fn global_root_legacy() -> PathBuf {
+ let mut path = t!(env::current_exe());
+ path.pop(); // chop off exe name
+ path.pop(); // chop off "deps"
+ path.push("tmp");
+ path.mkdir_p();
+ path
+}
+
+fn set_global_root(tmp_dir: Option<&'static str>) {
+ let mut lock = GLOBAL_ROOT.lock().unwrap();
+ if lock.is_none() {
+ let mut root = match tmp_dir {
+ Some(tmp_dir) => PathBuf::from(tmp_dir),
+ None => global_root_legacy(),
+ };
+
+ root.push(CARGO_INTEGRATION_TEST_DIR);
+ *lock = Some(root);
+ }
+}
+
+pub fn global_root() -> PathBuf {
+ let lock = GLOBAL_ROOT.lock().unwrap();
+ match lock.as_ref() {
+ Some(p) => p.clone(),
+ None => unreachable!("GLOBAL_ROOT not set yet"),
+ }
+}
+
+// We need to give each test a unique id. The test name could serve this
+// purpose, but the `test` crate doesn't have a way to obtain the current test
+// name.[*] Instead, we use the `cargo-test-macro` crate to automatically
+// insert an init function for each test that sets the test name in a thread
+// local variable.
+//
+// [*] It does set the thread name, but only when running concurrently. If not
+// running concurrently, all tests are run on the main thread.
+thread_local! {
+ static TEST_ID: RefCell<Option<usize>> = RefCell::new(None);
+}
+
+pub struct TestIdGuard {
+ _private: (),
+}
+
+pub fn init_root(tmp_dir: Option<&'static str>) -> TestIdGuard {
+ static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
+
+ let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
+ TEST_ID.with(|n| *n.borrow_mut() = Some(id));
+
+ let guard = TestIdGuard { _private: () };
+
+ set_global_root(tmp_dir);
+ let r = root();
+ r.rm_rf();
+ r.mkdir_p();
+
+ guard
+}
+
+impl Drop for TestIdGuard {
+ fn drop(&mut self) {
+ TEST_ID.with(|n| *n.borrow_mut() = None);
+ }
+}
+
+pub fn root() -> PathBuf {
+ let id = TEST_ID.with(|n| {
+ n.borrow().expect(
+ "Tests must use the `#[cargo_test]` attribute in \
+ order to be able to use the crate root.",
+ )
+ });
+
+ let mut root = global_root();
+ root.push(&format!("t{}", id));
+ root
+}
+
+pub fn home() -> PathBuf {
+ let mut path = root();
+ path.push("home");
+ path.mkdir_p();
+ path
+}
+
+pub trait CargoPathExt {
+ fn rm_rf(&self);
+ fn mkdir_p(&self);
+
+ fn move_into_the_past(&self) {
+ self.move_in_time(|sec, nsec| (sec - 3600, nsec))
+ }
+
+ fn move_into_the_future(&self) {
+ self.move_in_time(|sec, nsec| (sec + 3600, nsec))
+ }
+
+ fn move_in_time<F>(&self, travel_amount: F)
+ where
+ F: Fn(i64, u32) -> (i64, u32);
+}
+
+impl CargoPathExt for Path {
+ fn rm_rf(&self) {
+ let meta = match self.symlink_metadata() {
+ Ok(meta) => meta,
+ Err(e) => {
+ if e.kind() == ErrorKind::NotFound {
+ return;
+ }
+ panic!("failed to remove {:?}, could not read: {:?}", self, e);
+ }
+ };
+ // There is a race condition between fetching the metadata and
+ // actually performing the removal, but we don't care all that much
+ // for our tests.
+ if meta.is_dir() {
+ if let Err(e) = fs::remove_dir_all(self) {
+ panic!("failed to remove {:?}: {:?}", self, e)
+ }
+ } else if let Err(e) = fs::remove_file(self) {
+ panic!("failed to remove {:?}: {:?}", self, e)
+ }
+ }
+
+ fn mkdir_p(&self) {
+ fs::create_dir_all(self)
+ .unwrap_or_else(|e| panic!("failed to mkdir_p {}: {}", self.display(), e))
+ }
+
+ fn move_in_time<F>(&self, travel_amount: F)
+ where
+ F: Fn(i64, u32) -> (i64, u32),
+ {
+ if self.is_file() {
+ time_travel(self, &travel_amount);
+ } else {
+ recurse(self, &self.join("target"), &travel_amount);
+ }
+
+ fn recurse<F>(p: &Path, bad: &Path, travel_amount: &F)
+ where
+ F: Fn(i64, u32) -> (i64, u32),
+ {
+ if p.is_file() {
+ time_travel(p, travel_amount)
+ } else if !p.starts_with(bad) {
+ for f in t!(fs::read_dir(p)) {
+ let f = t!(f).path();
+ recurse(&f, bad, travel_amount);
+ }
+ }
+ }
+
+ fn time_travel<F>(path: &Path, travel_amount: &F)
+ where
+ F: Fn(i64, u32) -> (i64, u32),
+ {
+ let stat = t!(path.symlink_metadata());
+
+ let mtime = FileTime::from_last_modification_time(&stat);
+
+ let (sec, nsec) = travel_amount(mtime.unix_seconds(), mtime.nanoseconds());
+ let newtime = FileTime::from_unix_time(sec, nsec);
+
+ // Sadly change_file_times has a failure mode where a readonly file
+ // cannot have its times changed on windows.
+ do_op(path, "set file times", |path| {
+ filetime::set_file_times(path, newtime, newtime)
+ });
+ }
+ }
+}
+
+fn do_op<F>(path: &Path, desc: &str, mut f: F)
+where
+ F: FnMut(&Path) -> io::Result<()>,
+{
+ match f(path) {
+ Ok(()) => {}
+ Err(ref e) if e.kind() == ErrorKind::PermissionDenied => {
+ let mut p = t!(path.metadata()).permissions();
+ p.set_readonly(false);
+ t!(fs::set_permissions(path, p));
+
+ // Unix also requires the parent to not be readonly for example when
+ // removing files
+ let parent = path.parent().unwrap();
+ let mut p = t!(parent.metadata()).permissions();
+ p.set_readonly(false);
+ t!(fs::set_permissions(parent, p));
+
+ f(path).unwrap_or_else(|e| {
+ panic!("failed to {} {}: {}", desc, path.display(), e);
+ })
+ }
+ Err(e) => {
+ panic!("failed to {} {}: {}", desc, path.display(), e);
+ }
+ }
+}
+
+/// Get the filename for a library.
+///
+/// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro"
+///
+/// For example, dynamic library named "foo" would return:
+/// - macOS: "libfoo.dylib"
+/// - Windows: "foo.dll"
+/// - Unix: "libfoo.so"
+pub fn get_lib_filename(name: &str, kind: &str) -> String {
+ let prefix = get_lib_prefix(kind);
+ let extension = get_lib_extension(kind);
+ format!("{}{}.{}", prefix, name, extension)
+}
+
+pub fn get_lib_prefix(kind: &str) -> &str {
+ match kind {
+ "lib" | "rlib" => "lib",
+ "staticlib" | "dylib" | "proc-macro" => {
+ if cfg!(windows) {
+ ""
+ } else {
+ "lib"
+ }
+ }
+ _ => unreachable!(),
+ }
+}
+
+pub fn get_lib_extension(kind: &str) -> &str {
+ match kind {
+ "lib" | "rlib" => "rlib",
+ "staticlib" => {
+ if cfg!(windows) {
+ "lib"
+ } else {
+ "a"
+ }
+ }
+ "dylib" | "proc-macro" => {
+ if cfg!(windows) {
+ "dll"
+ } else if cfg!(target_os = "macos") {
+ "dylib"
+ } else {
+ "so"
+ }
+ }
+ _ => unreachable!(),
+ }
+}
+
+/// Returns the sysroot as queried from rustc.
+pub fn sysroot() -> String {
+ let output = Command::new("rustc")
+ .arg("--print=sysroot")
+ .output()
+ .expect("rustc to run");
+ assert!(output.status.success());
+ let sysroot = String::from_utf8(output.stdout).unwrap();
+ sysroot.trim().to_string()
+}
+
+/// Returns true if names such as aux.* are allowed.
+///
+/// Traditionally, Windows did not allow a set of file names (see `is_windows_reserved`
+/// for a list). More recent versions of Windows have relaxed this restriction. This test
+/// determines whether we are running in a mode that allows Windows reserved names.
+#[cfg(windows)]
+pub fn windows_reserved_names_are_allowed() -> bool {
+ use cargo_util::is_ci;
+
+ // Ensure tests still run in CI until we need to migrate.
+ if is_ci() {
+ return false;
+ }
+
+ use std::ffi::OsStr;
+ use std::os::windows::ffi::OsStrExt;
+ use std::ptr;
+ use windows_sys::Win32::Storage::FileSystem::GetFullPathNameW;
+
+ let test_file_name: Vec<_> = OsStr::new("aux.rs").encode_wide().collect();
+
+ let buffer_length =
+ unsafe { GetFullPathNameW(test_file_name.as_ptr(), 0, ptr::null_mut(), ptr::null_mut()) };
+
+ if buffer_length == 0 {
+ // This means the call failed, so we'll conservatively assume reserved names are not allowed.
+ return false;
+ }
+
+ let mut buffer = vec![0u16; buffer_length as usize];
+
+ let result = unsafe {
+ GetFullPathNameW(
+ test_file_name.as_ptr(),
+ buffer_length,
+ buffer.as_mut_ptr(),
+ ptr::null_mut(),
+ )
+ };
+
+ if result == 0 {
+ // Once again, conservatively assume reserved names are not allowed if the
+ // GetFullPathNameW call failed.
+ return false;
+ }
+
+ // Under the old rules, a file name like aux.rs would get converted into \\.\aux, so
+ // we detect this case by checking if the string starts with \\.\
+ //
+ // Otherwise, the filename will be something like C:\Users\Foo\Documents\aux.rs
+ let prefix: Vec<_> = OsStr::new("\\\\.\\").encode_wide().collect();
+ if buffer.starts_with(&prefix) {
+ false
+ } else {
+ true
+ }
+}
diff --git a/crates/cargo-test-support/src/publish.rs b/crates/cargo-test-support/src/publish.rs
new file mode 100644
index 0000000..85bc93c
--- /dev/null
+++ b/crates/cargo-test-support/src/publish.rs
@@ -0,0 +1,245 @@
+use crate::compare::{assert_match_exact, find_json_mismatch};
+use crate::registry::{self, alt_api_path, FeatureMap};
+use flate2::read::GzDecoder;
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use std::fs::File;
+use std::io::{self, prelude::*, SeekFrom};
+use std::path::{Path, PathBuf};
+use tar::Archive;
+
+fn read_le_u32<R>(mut reader: R) -> io::Result<u32>
+where
+ R: Read,
+{
+ let mut buf = [0; 4];
+ reader.read_exact(&mut buf)?;
+ Ok(u32::from_le_bytes(buf))
+}
+
+/// Checks the result of a crate publish.
+pub fn validate_upload(expected_json: &str, expected_crate_name: &str, expected_files: &[&str]) {
+ let new_path = registry::api_path().join("api/v1/crates/new");
+ _validate_upload(
+ &new_path,
+ expected_json,
+ expected_crate_name,
+ expected_files,
+ &[],
+ );
+}
+
+/// Checks the result of a crate publish, along with the contents of the files.
+pub fn validate_upload_with_contents(
+ expected_json: &str,
+ expected_crate_name: &str,
+ expected_files: &[&str],
+ expected_contents: &[(&str, &str)],
+) {
+ let new_path = registry::api_path().join("api/v1/crates/new");
+ _validate_upload(
+ &new_path,
+ expected_json,
+ expected_crate_name,
+ expected_files,
+ expected_contents,
+ );
+}
+
+/// Checks the result of a crate publish to an alternative registry.
+pub fn validate_alt_upload(
+ expected_json: &str,
+ expected_crate_name: &str,
+ expected_files: &[&str],
+) {
+ let new_path = alt_api_path().join("api/v1/crates/new");
+ _validate_upload(
+ &new_path,
+ expected_json,
+ expected_crate_name,
+ expected_files,
+ &[],
+ );
+}
+
+fn _validate_upload(
+ new_path: &Path,
+ expected_json: &str,
+ expected_crate_name: &str,
+ expected_files: &[&str],
+ expected_contents: &[(&str, &str)],
+) {
+ let mut f = File::open(new_path).unwrap();
+ // 32-bit little-endian integer of length of JSON data.
+ let json_sz = read_le_u32(&mut f).expect("read json length");
+ let mut json_bytes = vec![0; json_sz as usize];
+ f.read_exact(&mut json_bytes).expect("read JSON data");
+ let actual_json = serde_json::from_slice(&json_bytes).expect("uploaded JSON should be valid");
+ let expected_json = serde_json::from_str(expected_json).expect("expected JSON does not parse");
+
+ if let Err(e) = find_json_mismatch(&expected_json, &actual_json, None) {
+ panic!("{}", e);
+ }
+
+ // 32-bit little-endian integer of length of crate file.
+ let crate_sz = read_le_u32(&mut f).expect("read crate length");
+ let mut krate_bytes = vec![0; crate_sz as usize];
+ f.read_exact(&mut krate_bytes).expect("read crate data");
+ // Check at end.
+ let current = f.seek(SeekFrom::Current(0)).unwrap();
+ assert_eq!(f.seek(SeekFrom::End(0)).unwrap(), current);
+
+ // Verify the tarball.
+ validate_crate_contents(
+ &krate_bytes[..],
+ expected_crate_name,
+ expected_files,
+ expected_contents,
+ );
+}
+
+/// Checks the contents of a `.crate` file.
+///
+/// - `expected_crate_name` should be something like `foo-0.0.1.crate`.
+/// - `expected_files` should be a complete list of files in the crate
+/// (relative to expected_crate_name).
+/// - `expected_contents` should be a list of `(file_name, contents)` tuples
+/// to validate the contents of the given file. Only the listed files will
+/// be checked (others will be ignored).
+pub fn validate_crate_contents(
+ reader: impl Read,
+ expected_crate_name: &str,
+ expected_files: &[&str],
+ expected_contents: &[(&str, &str)],
+) {
+ let mut rdr = GzDecoder::new(reader);
+ assert_eq!(
+ rdr.header().unwrap().filename().unwrap(),
+ expected_crate_name.as_bytes()
+ );
+ let mut contents = Vec::new();
+ rdr.read_to_end(&mut contents).unwrap();
+ let mut ar = Archive::new(&contents[..]);
+ let files: HashMap<PathBuf, String> = ar
+ .entries()
+ .unwrap()
+ .map(|entry| {
+ let mut entry = entry.unwrap();
+ let name = entry.path().unwrap().into_owned();
+ let mut contents = String::new();
+ entry.read_to_string(&mut contents).unwrap();
+ (name, contents)
+ })
+ .collect();
+ assert!(expected_crate_name.ends_with(".crate"));
+ let base_crate_name = Path::new(&expected_crate_name[..expected_crate_name.len() - 6]);
+ let actual_files: HashSet<PathBuf> = files.keys().cloned().collect();
+ let expected_files: HashSet<PathBuf> = expected_files
+ .iter()
+ .map(|name| base_crate_name.join(name))
+ .collect();
+ let missing: Vec<&PathBuf> = expected_files.difference(&actual_files).collect();
+ let extra: Vec<&PathBuf> = actual_files.difference(&expected_files).collect();
+ if !missing.is_empty() || !extra.is_empty() {
+ panic!(
+ "uploaded archive does not match.\nMissing: {:?}\nExtra: {:?}\n",
+ missing, extra
+ );
+ }
+ if !expected_contents.is_empty() {
+ for (e_file_name, e_file_contents) in expected_contents {
+ let full_e_name = base_crate_name.join(e_file_name);
+ let actual_contents = files
+ .get(&full_e_name)
+ .unwrap_or_else(|| panic!("file `{}` missing in archive", e_file_name));
+ assert_match_exact(e_file_contents, actual_contents);
+ }
+ }
+}
+
+pub(crate) fn create_index_line(
+ name: serde_json::Value,
+ vers: &str,
+ deps: Vec<serde_json::Value>,
+ cksum: &str,
+ features: crate::registry::FeatureMap,
+ yanked: bool,
+ links: Option<String>,
+ v: Option<u32>,
+) -> String {
+ // This emulates what crates.io does to retain backwards compatibility.
+ let (features, features2) = split_index_features(features.clone());
+ let mut json = serde_json::json!({
+ "name": name,
+ "vers": vers,
+ "deps": deps,
+ "cksum": cksum,
+ "features": features,
+ "yanked": yanked,
+ "links": links,
+ });
+ if let Some(f2) = &features2 {
+ json["features2"] = serde_json::json!(f2);
+ json["v"] = serde_json::json!(2);
+ }
+ if let Some(v) = v {
+ json["v"] = serde_json::json!(v);
+ }
+
+ json.to_string()
+}
+
+pub(crate) fn write_to_index(registry_path: &PathBuf, name: &str, line: String, local: bool) {
+ let file = cargo_util::registry::make_dep_path(name, false);
+
+ // Write file/line in the index.
+ let dst = if local {
+ registry_path.join("index").join(&file)
+ } else {
+ registry_path.join(&file)
+ };
+ let prev = fs::read_to_string(&dst).unwrap_or_default();
+ t!(fs::create_dir_all(dst.parent().unwrap()));
+ t!(fs::write(&dst, prev + &line[..] + "\n"));
+
+ // Add the new file to the index.
+ if !local {
+ let repo = t!(git2::Repository::open(&registry_path));
+ let mut index = t!(repo.index());
+ t!(index.add_path(Path::new(&file)));
+ t!(index.write());
+ let id = t!(index.write_tree());
+
+ // Commit this change.
+ let tree = t!(repo.find_tree(id));
+ let sig = t!(repo.signature());
+ let parent = t!(repo.refname_to_id("refs/heads/master"));
+ let parent = t!(repo.find_commit(parent));
+ t!(repo.commit(
+ Some("HEAD"),
+ &sig,
+ &sig,
+ "Another commit",
+ &tree,
+ &[&parent]
+ ));
+ }
+}
+
+fn split_index_features(mut features: FeatureMap) -> (FeatureMap, Option<FeatureMap>) {
+ let mut features2 = FeatureMap::new();
+ for (feat, values) in features.iter_mut() {
+ if values
+ .iter()
+ .any(|value| value.starts_with("dep:") || value.contains("?/"))
+ {
+ let new_values = values.drain(..).collect();
+ features2.insert(feat.clone(), new_values);
+ }
+ }
+ if features2.is_empty() {
+ (features, None)
+ } else {
+ (features, Some(features2))
+ }
+}
diff --git a/crates/cargo-test-support/src/registry.rs b/crates/cargo-test-support/src/registry.rs
new file mode 100644
index 0000000..7b1dc54
--- /dev/null
+++ b/crates/cargo-test-support/src/registry.rs
@@ -0,0 +1,1581 @@
+use crate::git::repo;
+use crate::paths;
+use crate::publish::{create_index_line, write_to_index};
+use cargo_util::paths::append;
+use cargo_util::Sha256;
+use flate2::write::GzEncoder;
+use flate2::Compression;
+use pasetors::keys::{AsymmetricPublicKey, AsymmetricSecretKey};
+use pasetors::paserk::FormatAsPaserk;
+use pasetors::token::UntrustedToken;
+use std::collections::{BTreeMap, HashMap};
+use std::fmt;
+use std::fs::{self, File};
+use std::io::{BufRead, BufReader, Read, Write};
+use std::net::{SocketAddr, TcpListener, TcpStream};
+use std::path::PathBuf;
+use std::thread::{self, JoinHandle};
+use tar::{Builder, Header};
+use time::format_description::well_known::Rfc3339;
+use time::{Duration, OffsetDateTime};
+use url::Url;
+
+/// Gets the path to the local index pretending to be crates.io. This is a Git repo
+/// initialized with a `config.json` file pointing to `dl_path` for downloads
+/// and `api_path` for uploads.
+pub fn registry_path() -> PathBuf {
+ generate_path("registry")
+}
+/// Gets the path for local web API uploads. Cargo will place the contents of a web API
+/// request here. For example, `api/v1/crates/new` is the result of publishing a crate.
+pub fn api_path() -> PathBuf {
+ generate_path("api")
+}
+/// Gets the path where crates can be downloaded using the web API endpoint. Crates
+/// should be organized as `{name}/{version}/download` to match the web API
+/// endpoint. This is rarely used and must be manually set up.
+fn dl_path() -> PathBuf {
+ generate_path("dl")
+}
+/// Gets the alternative-registry version of `registry_path`.
+fn alt_registry_path() -> PathBuf {
+ generate_path("alternative-registry")
+}
+/// Gets the alternative-registry version of `registry_url`.
+fn alt_registry_url() -> Url {
+ generate_url("alternative-registry")
+}
+/// Gets the alternative-registry version of `dl_path`.
+pub fn alt_dl_path() -> PathBuf {
+ generate_path("alternative-dl")
+}
+/// Gets the alternative-registry version of `api_path`.
+pub fn alt_api_path() -> PathBuf {
+ generate_path("alternative-api")
+}
+fn generate_path(name: &str) -> PathBuf {
+ paths::root().join(name)
+}
+fn generate_url(name: &str) -> Url {
+ Url::from_file_path(generate_path(name)).ok().unwrap()
+}
+
+#[derive(Clone)]
+pub enum Token {
+ Plaintext(String),
+ Keys(String, Option<String>),
+}
+
+impl Token {
+ /// This is a valid PASETO secret key.
+ /// This one is already publicly available as part of the text of the RFC so is safe to use for tests.
+ pub fn rfc_key() -> Token {
+ Token::Keys(
+ "k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36"
+ .to_string(),
+ Some("sub".to_string()),
+ )
+ }
+}
+
+/// A builder for initializing registries.
+pub struct RegistryBuilder {
+ /// If set, configures an alternate registry with the given name.
+ alternative: Option<String>,
+ /// The authorization token for the registry.
+ token: Option<Token>,
+ /// If set, the registry requires authorization for all operations.
+ auth_required: bool,
+ /// If set, serves the index over http.
+ http_index: bool,
+ /// If set, serves the API over http.
+ http_api: bool,
+ /// If set, config.json includes 'api'
+ api: bool,
+ /// Write the token in the configuration.
+ configure_token: bool,
+ /// Write the registry in configuration.
+ configure_registry: bool,
+ /// API responders.
+ custom_responders: HashMap<&'static str, Box<dyn Send + Fn(&Request, &HttpServer) -> Response>>,
+}
+
+pub struct TestRegistry {
+ server: Option<HttpServerHandle>,
+ index_url: Url,
+ path: PathBuf,
+ api_url: Url,
+ dl_url: Url,
+ token: Token,
+}
+
+impl TestRegistry {
+ pub fn index_url(&self) -> &Url {
+ &self.index_url
+ }
+
+ pub fn api_url(&self) -> &Url {
+ &self.api_url
+ }
+
+ pub fn token(&self) -> &str {
+ match &self.token {
+ Token::Plaintext(s) => s,
+ Token::Keys(_, _) => panic!("registry was not configured with a plaintext token"),
+ }
+ }
+
+ pub fn key(&self) -> &str {
+ match &self.token {
+ Token::Plaintext(_) => panic!("registry was not configured with a secret key"),
+ Token::Keys(s, _) => s,
+ }
+ }
+
+ /// Shut down the server thread and wait for it to stop.
+ /// `Drop` automatically stops the server, but this additionally
+ /// waits for the thread to stop.
+ pub fn join(self) {
+ if let Some(mut server) = self.server {
+ server.stop();
+ let handle = server.handle.take().unwrap();
+ handle.join().unwrap();
+ }
+ }
+}
+
+impl RegistryBuilder {
+ #[must_use]
+ pub fn new() -> RegistryBuilder {
+ RegistryBuilder {
+ alternative: None,
+ token: None,
+ auth_required: false,
+ http_api: false,
+ http_index: false,
+ api: true,
+ configure_registry: true,
+ configure_token: true,
+ custom_responders: HashMap::new(),
+ }
+ }
+
+ /// Adds a custom HTTP response for a specific url
+ #[must_use]
+ pub fn add_responder<R: 'static + Send + Fn(&Request, &HttpServer) -> Response>(
+ mut self,
+ url: &'static str,
+ responder: R,
+ ) -> Self {
+ self.custom_responders.insert(url, Box::new(responder));
+ self
+ }
+
+ /// Sets whether or not to initialize as an alternative registry.
+ #[must_use]
+ pub fn alternative_named(mut self, alt: &str) -> Self {
+ self.alternative = Some(alt.to_string());
+ self
+ }
+
+ /// Sets whether or not to initialize as an alternative registry.
+ #[must_use]
+ pub fn alternative(self) -> Self {
+ self.alternative_named("alternative")
+ }
+
+ /// Prevents placing a token in the configuration
+ #[must_use]
+ pub fn no_configure_token(mut self) -> Self {
+ self.configure_token = false;
+ self
+ }
+
+ /// Prevents adding the registry to the configuration.
+ #[must_use]
+ pub fn no_configure_registry(mut self) -> Self {
+ self.configure_registry = false;
+ self
+ }
+
+ /// Sets the token value
+ #[must_use]
+ pub fn token(mut self, token: Token) -> Self {
+ self.token = Some(token);
+ self
+ }
+
+ /// Sets this registry to require the authentication token for
+ /// all operations.
+ #[must_use]
+ pub fn auth_required(mut self) -> Self {
+ self.auth_required = true;
+ self
+ }
+
+ /// Operate the index over http
+ #[must_use]
+ pub fn http_index(mut self) -> Self {
+ self.http_index = true;
+ self
+ }
+
+ /// Operate the api over http
+ #[must_use]
+ pub fn http_api(mut self) -> Self {
+ self.http_api = true;
+ self
+ }
+
+ /// The registry has no api.
+ #[must_use]
+ pub fn no_api(mut self) -> Self {
+ self.api = false;
+ self
+ }
+
    /// Initializes the registry.
    ///
    /// Creates the registry/dl/api directories, optionally starts an HTTP
    /// server for the sparse index and/or web API, writes the
    /// `.cargo/config` source-replacement and credentials entries, and
    /// seeds the index with a `config.json`.
    #[must_use]
    pub fn build(self) -> TestRegistry {
        let config_path = paths::home().join(".cargo/config");
        t!(fs::create_dir_all(config_path.parent().unwrap()));
        // Alternative registries are namespaced with a "<name>-" prefix so
        // several registries can coexist in one test home directory.
        let prefix = if let Some(alternative) = &self.alternative {
            format!("{alternative}-")
        } else {
            String::new()
        };
        let registry_path = generate_path(&format!("{prefix}registry"));
        let index_url = generate_url(&format!("{prefix}registry"));
        let api_url = generate_url(&format!("{prefix}api"));
        let dl_url = generate_url(&format!("{prefix}dl"));
        let dl_path = generate_path(&format!("{prefix}dl"));
        let api_path = generate_path(&format!("{prefix}api"));
        let token = self
            .token
            .unwrap_or_else(|| Token::Plaintext(format!("{prefix}sekrit")));

        let (server, index_url, api_url, dl_url) = if !self.http_index && !self.http_api {
            // No need to start the HTTP server.
            (None, index_url, api_url, dl_url)
        } else {
            let server = HttpServer::new(
                registry_path.clone(),
                dl_path,
                api_path.clone(),
                token.clone(),
                self.auth_required,
                self.custom_responders,
            );
            // Only the parts explicitly requested over HTTP take the
            // server's URLs; everything else keeps its file-based URL.
            let index_url = if self.http_index {
                server.index_url()
            } else {
                index_url
            };
            let api_url = if self.http_api {
                server.api_url()
            } else {
                api_url
            };
            let dl_url = server.dl_url();
            (Some(server), index_url, api_url, dl_url)
        };

        let registry = TestRegistry {
            api_url,
            index_url,
            server,
            dl_url,
            path: registry_path,
            token,
        };

        if self.configure_registry {
            // Point cargo at this registry: either as a named alternative
            // registry, or as a source replacement for crates.io.
            if let Some(alternative) = &self.alternative {
                append(
                    &config_path,
                    format!(
                        "
[registries.{alternative}]
index = '{}'",
                        registry.index_url
                    )
                    .as_bytes(),
                )
                .unwrap();
            } else {
                append(
                    &config_path,
                    format!(
                        "
[source.crates-io]
replace-with = 'dummy-registry'

[registries.dummy-registry]
index = '{}'",
                        registry.index_url
                    )
                    .as_bytes(),
                )
                .unwrap();
            }
        }

        if self.configure_token {
            // Write the credentials file matching the token kind.
            let credentials = paths::home().join(".cargo/credentials.toml");
            match &registry.token {
                Token::Plaintext(token) => {
                    if let Some(alternative) = &self.alternative {
                        append(
                            &credentials,
                            format!(
                                r#"
[registries.{alternative}]
token = "{token}"
"#
                            )
                            .as_bytes(),
                        )
                        .unwrap();
                    } else {
                        append(
                            &credentials,
                            format!(
                                r#"
[registry]
token = "{token}"
"#
                            )
                            .as_bytes(),
                        )
                        .unwrap();
                    }
                }
                Token::Keys(key, subject) => {
                    // Asymmetric tokens store a secret key (and optional
                    // subject) rather than a plaintext token.
                    let mut out = if let Some(alternative) = &self.alternative {
                        format!("\n[registries.{alternative}]\n")
                    } else {
                        format!("\n[registry]\n")
                    };
                    out += &format!("secret-key = \"{key}\"\n");
                    if let Some(subject) = subject {
                        out += &format!("secret-key-subject = \"{subject}\"\n");
                    }

                    append(&credentials, out.as_bytes()).unwrap();
                }
            }
        }

        // Seed the index with its `config.json` describing dl/api URLs.
        let auth = if self.auth_required {
            r#","auth-required":true"#
        } else {
            ""
        };
        let api = if self.api {
            format!(r#","api":"{}""#, registry.api_url)
        } else {
            String::new()
        };
        // Initialize a new registry.
        repo(&registry.path)
            .file(
                "config.json",
                &format!(r#"{{"dl":"{}"{api}{auth}}}"#, registry.dl_url),
            )
            .build();
        fs::create_dir_all(api_path.join("api/v1/crates")).unwrap();

        registry
    }
+}
+
+/// A builder for creating a new package in a registry.
+///
+/// This uses "source replacement" using an automatically generated
+/// `.cargo/config` file to ensure that dependencies will use these packages
+/// instead of contacting crates.io. See `source-replacement.md` for more
+/// details on how source replacement works.
+///
+/// Call `publish` to finalize and create the package.
+///
+/// If no files are specified, an empty `lib.rs` file is automatically created.
+///
+/// The `Cargo.toml` file is automatically generated based on the methods
+/// called on `Package` (for example, calling `dep()` will add to the
+/// `[dependencies]` automatically). You may also specify a `Cargo.toml` file
+/// to override the generated one.
+///
+/// This supports different registry types:
+/// - Regular source replacement that replaces `crates.io` (the default).
+/// - A "local registry" which is a subset for vendoring (see
+/// `Package::local`).
+/// - An "alternative registry" which requires specifying the registry name
+/// (see `Package::alternative`).
+///
+/// This does not support "directory sources". See `directory.rs` for
+/// `VendorPackage` which implements directory sources.
+///
+/// # Example
+/// ```
+/// // Publish package "a" depending on "b".
+/// Package::new("a", "1.0.0")
+/// .dep("b", "1.0.0")
+/// .file("src/lib.rs", r#"
+/// extern crate b;
+/// pub fn f() -> i32 { b::f() * 2 }
+/// "#)
+/// .publish();
+///
+/// // Publish package "b".
+/// Package::new("b", "1.0.0")
+/// .file("src/lib.rs", r#"
+/// pub fn f() -> i32 { 12 }
+/// "#)
+/// .publish();
+///
+/// // Create a project that uses package "a".
+/// let p = project()
+/// .file("Cargo.toml", r#"
+/// [package]
+/// name = "foo"
+/// version = "0.0.1"
+///
+/// [dependencies]
+/// a = "1.0"
+/// "#)
+/// .file("src/main.rs", r#"
+/// extern crate a;
+/// fn main() { println!("{}", a::f()); }
+/// "#)
+/// .build();
+///
+/// p.cargo("run").with_stdout("24").run();
+/// ```
#[must_use]
pub struct Package {
    name: String,
    vers: String,
    deps: Vec<Dependency>,
    files: Vec<PackageFile>,
    /// Mark the index entry as yanked.
    yanked: bool,
    /// `[features]` table entries for the manifest/index.
    features: FeatureMap,
    /// Publish to a "local registry" (vendoring subset) instead.
    local: bool,
    /// Publish to the "alternative" registry instead of crates.io.
    alternative: bool,
    /// Emit a deliberately invalid index JSON line (skip-version tests).
    invalid_json: bool,
    /// Generate `[lib] proc-macro = true` in the manifest.
    proc_macro: bool,
    /// Value for the `links` field, if any.
    links: Option<String>,
    /// Minimal supported Rust version (`rust-version` in the manifest).
    rust_version: Option<String>,
    /// `cargo-features = [...]` entries for the generated manifest.
    cargo_features: Vec<String>,
    /// Index schema version (`v` field), if set.
    v: Option<u32>,
}
+
+pub(crate) type FeatureMap = BTreeMap<String, Vec<String>>;
+
/// A dependency of a test [`Package`], recorded in its manifest and index entry.
#[derive(Clone)]
pub struct Dependency {
    name: String,
    vers: String,
    /// One of `"normal"`, `"build"`, or `"dev"`.
    kind: String,
    /// Artifact dependency: (artifact kind, optional target triple).
    artifact: Option<(String, Option<String>)>,
    /// Platform/cfg expression the dependency is restricted to.
    target: Option<String>,
    features: Vec<String>,
    /// Registry name ("alternative"); `None` means same registry/crates.io.
    registry: Option<String>,
    /// Renamed package (`package = "..."`), if any.
    package: Option<String>,
    optional: bool,
}
+
/// Entry with data that corresponds to [`tar::EntryType`].
#[non_exhaustive]
enum EntryData {
    /// A regular file with the given contents.
    Regular(String),
    /// A symlink pointing at the given path.
    Symlink(PathBuf),
}
+
/// A file to be created in a package.
struct PackageFile {
    /// Path of the file within the archive.
    path: String,
    /// File contents (regular data or a symlink target).
    contents: EntryData,
    /// The Unix mode for the file. Note that when extracted on Windows, this
    /// is mostly ignored since it doesn't have the same style of permissions.
    mode: u32,
    /// If `true`, the file is created in the root of the tarfile, used for
    /// testing invalid packages.
    extra: bool,
}
+
+const DEFAULT_MODE: u32 = 0o644;
+
+/// Initializes the on-disk registry and sets up the config so that crates.io
+/// is replaced with the one on disk.
+pub fn init() -> TestRegistry {
+ RegistryBuilder::new().build()
+}
+
+/// Variant of `init` that initializes the "alternative" registry and crates.io
+/// replacement.
+pub fn alt_init() -> TestRegistry {
+ init();
+ RegistryBuilder::new().alternative().build()
+}
+
/// Handle for controlling a running [`HttpServer`] thread; stops it on drop.
pub struct HttpServerHandle {
    /// Address the server is listening on.
    addr: SocketAddr,
    /// Join handle for the server thread.
    handle: Option<JoinHandle<()>>,
}
+
+impl HttpServerHandle {
+ pub fn index_url(&self) -> Url {
+ Url::parse(&format!("sparse+http://{}/index/", self.addr.to_string())).unwrap()
+ }
+
+ pub fn api_url(&self) -> Url {
+ Url::parse(&format!("http://{}/", self.addr.to_string())).unwrap()
+ }
+
+ pub fn dl_url(&self) -> Url {
+ Url::parse(&format!("http://{}/dl", self.addr.to_string())).unwrap()
+ }
+
+ fn stop(&self) {
+ if let Ok(mut stream) = TcpStream::connect(self.addr) {
+ // shutdown the server
+ let _ = stream.write_all(b"stop");
+ let _ = stream.flush();
+ }
+ }
+}
+
impl Drop for HttpServerHandle {
    fn drop(&mut self) {
        // Ask the server thread to shut down when the handle goes away.
        self.stop();
    }
}
+
/// Request to the test http server
#[derive(Clone)]
pub struct Request {
    pub url: Url,
    /// Lowercased HTTP method ("get", "put", ...).
    pub method: String,
    /// Request body, present when a `Content-Length` header was sent.
    pub body: Option<Vec<u8>>,
    /// Value of the `Authorization` header, if any.
    pub authorization: Option<String>,
    /// Value of the `If-Modified-Since` header, if any.
    pub if_modified_since: Option<String>,
    /// Value of the `If-None-Match` header, if any.
    pub if_none_match: Option<String>,
}
+
+impl fmt::Debug for Request {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // body is not included as it can produce long debug outputs
+ f.debug_struct("Request")
+ .field("url", &self.url)
+ .field("method", &self.method)
+ .field("authorization", &self.authorization)
+ .field("if_modified_since", &self.if_modified_since)
+ .field("if_none_match", &self.if_none_match)
+ .finish()
+ }
+}
+
/// Response from the test http server
pub struct Response {
    /// HTTP status code (200, 304, 401, ...).
    pub code: u32,
    /// Raw header lines, without trailing CRLF.
    pub headers: Vec<String>,
    pub body: Vec<u8>,
}
+
/// Minimal single-threaded HTTP server backing the test registry.
pub struct HttpServer {
    listener: TcpListener,
    /// On-disk location of the registry index.
    registry_path: PathBuf,
    /// On-disk location of `.crate` downloads.
    dl_path: PathBuf,
    /// On-disk location of recorded API requests.
    api_path: PathBuf,
    addr: SocketAddr,
    /// Token requests are validated against.
    token: Token,
    /// If `true`, reads also require authentication (not just mutations).
    auth_required: bool,
    /// Per-path overrides installed by tests; keyed by URL path.
    custom_responders: HashMap<&'static str, Box<dyn Send + Fn(&Request, &HttpServer) -> Response>>,
}
+
/// A helper struct that collects the arguments for [HttpServer::check_authorized].
/// Based on looking at the request, these are the fields that the authentication header should attest to.
pub struct Mutation<'a> {
    /// Operation name: "publish", "yank", "unyank", or "owners".
    pub mutation: &'a str,
    /// Crate name the operation applies to, when known.
    pub name: Option<&'a str>,
    /// Crate version the operation applies to, when known.
    pub vers: Option<&'a str>,
    /// Checksum of the uploaded `.crate` (publish only).
    pub cksum: Option<&'a str>,
}
+
+impl HttpServer {
    /// Binds an ephemeral local port, spawns the server thread, and
    /// returns a handle that stops the server when dropped.
    pub fn new(
        registry_path: PathBuf,
        dl_path: PathBuf,
        api_path: PathBuf,
        token: Token,
        auth_required: bool,
        api_responders: HashMap<
            &'static str,
            Box<dyn Send + Fn(&Request, &HttpServer) -> Response>,
        >,
    ) -> HttpServerHandle {
        // Port 0 lets the OS pick a free port so parallel tests don't collide.
        let listener = TcpListener::bind("127.0.0.1:0").unwrap();
        let addr = listener.local_addr().unwrap();
        let server = HttpServer {
            listener,
            registry_path,
            dl_path,
            api_path,
            addr,
            token,
            auth_required,
            custom_responders: api_responders,
        };
        // The server runs on its own thread until it receives a "stop" request.
        let handle = Some(thread::spawn(move || server.start()));
        HttpServerHandle { addr, handle }
    }
+
    /// Accept loop for the server thread.
    ///
    /// Handles one request per connection: parses the request line and the
    /// headers it cares about, dispatches through [`Self::route`], and
    /// writes a minimal HTTP/1.1 response. A connection whose first token
    /// is `stop` shuts the server down (see [`HttpServerHandle::stop`]).
    fn start(&self) {
        let mut line = String::new();
        'server: loop {
            let (socket, _) = self.listener.accept().unwrap();
            let mut buf = BufReader::new(socket);
            line.clear();
            if buf.read_line(&mut line).unwrap() == 0 {
                // Connection terminated.
                continue;
            }
            // Read the "GET path HTTP/1.1" line.
            let mut parts = line.split_ascii_whitespace();
            let method = parts.next().unwrap().to_ascii_lowercase();
            if method == "stop" {
                // Shutdown the server.
                return;
            }
            let addr = self.listener.local_addr().unwrap();
            let url = format!(
                "http://{}/{}",
                addr,
                parts.next().unwrap().trim_start_matches('/')
            );
            let url = Url::parse(&url).unwrap();

            // Grab headers we care about.
            let mut if_modified_since = None;
            let mut if_none_match = None;
            let mut authorization = None;
            let mut content_len = None;
            loop {
                line.clear();
                if buf.read_line(&mut line).unwrap() == 0 {
                    // Client hung up mid-headers; drop the connection.
                    continue 'server;
                }
                if line == "\r\n" {
                    // End of headers.
                    line.clear();
                    break;
                }
                let (name, value) = line.split_once(':').unwrap();
                let name = name.trim().to_ascii_lowercase();
                let value = value.trim().to_string();
                match name.as_str() {
                    "if-modified-since" => if_modified_since = Some(value),
                    "if-none-match" => if_none_match = Some(value),
                    "authorization" => authorization = Some(value),
                    "content-length" => content_len = Some(value),
                    _ => {}
                }
            }

            // Read the body, if a Content-Length was given.
            let mut body = None;
            if let Some(con_len) = content_len {
                let len = con_len.parse::<u64>().unwrap();
                let mut content = vec![0u8; len as usize];
                buf.read_exact(&mut content).unwrap();
                body = Some(content)
            }

            let req = Request {
                authorization,
                if_modified_since,
                if_none_match,
                method,
                url,
                body,
            };
            println!("req: {:#?}", req);
            let response = self.route(&req);
            // Write the response directly to the underlying socket.
            let buf = buf.get_mut();
            write!(buf, "HTTP/1.1 {}\r\n", response.code).unwrap();
            write!(buf, "Content-Length: {}\r\n", response.body.len()).unwrap();
            for header in response.headers {
                write!(buf, "{}\r\n", header).unwrap();
            }
            write!(buf, "\r\n").unwrap();
            buf.write_all(&response.body).unwrap();
            buf.flush().unwrap();
        }
    }
+
    /// Validates the request's `Authorization` header against this
    /// registry's token.
    ///
    /// Plaintext tokens are a straight string comparison. Asymmetric
    /// tokens are verified as PASETO v3.public tokens following the
    /// asymmetric-tokens RFC: signature, footer key id, registry URL,
    /// issue time, subject, and — for mutations — the
    /// operation/name/version/checksum claims.
    fn check_authorized(&self, req: &Request, mutation: Option<Mutation>) -> bool {
        let (private_key, private_key_subject) = if mutation.is_some() || self.auth_required {
            match &self.token {
                Token::Plaintext(token) => return Some(token) == req.authorization.as_ref(),
                Token::Keys(private_key, private_key_subject) => {
                    (private_key.as_str(), private_key_subject)
                }
            }
        } else {
            // Reads on a registry without auth-required must not send a token.
            assert!(req.authorization.is_none(), "unexpected token");
            return true;
        };

        // Shadows the crate-level `t!`: any missing or invalid piece
        // rejects the request instead of panicking.
        macro_rules! t {
            ($e:expr) => {
                match $e {
                    Some(e) => e,
                    None => return false,
                }
            };
        }

        // Derive the public key (and its PASERK id) from our private key to
        // verify against.
        let secret: AsymmetricSecretKey<pasetors::version3::V3> = private_key.try_into().unwrap();
        let public: AsymmetricPublicKey<pasetors::version3::V3> = (&secret).try_into().unwrap();
        let pub_key_id: pasetors::paserk::Id = (&public).into();
        let mut paserk_pub_key_id = String::new();
        FormatAsPaserk::fmt(&pub_key_id, &mut paserk_pub_key_id).unwrap();
        // https://github.com/rust-lang/rfcs/blob/master/text/3231-cargo-asymmetric-tokens.md#how-the-registry-server-will-validate-an-asymmetric-token

        // - The PASETO is in v3.public format.
        let authorization = t!(&req.authorization);
        let untrusted_token = t!(
            UntrustedToken::<pasetors::Public, pasetors::version3::V3>::try_from(authorization)
                .ok()
        );

        // - The PASETO validates using the public key it looked up based on the key ID.
        #[derive(serde::Deserialize, Debug)]
        struct Footer<'a> {
            url: &'a str,
            kip: &'a str,
        }
        let footer: Footer = t!(serde_json::from_slice(untrusted_token.untrusted_footer()).ok());
        if footer.kip != paserk_pub_key_id {
            return false;
        }
        let trusted_token =
            t!(
                pasetors::version3::PublicToken::verify(&public, &untrusted_token, None, None,)
                    .ok()
            );

        // - The URL matches the registry base URL
        if footer.url != "https://github.com/rust-lang/crates.io-index"
            && footer.url != &format!("sparse+http://{}/index/", self.addr.to_string())
        {
            dbg!(footer.url);
            return false;
        }

        // - The PASETO is still within its valid time period.
        #[derive(serde::Deserialize)]
        struct Message<'a> {
            iat: &'a str,
            sub: Option<&'a str>,
            mutation: Option<&'a str>,
            name: Option<&'a str>,
            vers: Option<&'a str>,
            cksum: Option<&'a str>,
            _challenge: Option<&'a str>, // todo: PASETO with challenges
            v: Option<u8>,
        }
        let message: Message = t!(serde_json::from_str(trusted_token.payload()).ok());
        let token_time = t!(OffsetDateTime::parse(message.iat, &Rfc3339).ok());
        let now = OffsetDateTime::now_utc();
        if (now - token_time) > Duration::MINUTE {
            return false;
        }
        if private_key_subject.as_deref() != message.sub {
            dbg!(message.sub);
            return false;
        }
        // - If the claim v is set, that it has the value of 1.
        if let Some(v) = message.v {
            if v != 1 {
                dbg!(message.v);
                return false;
            }
        }
        // - If the server issues challenges, that the challenge has not yet been answered.
        // todo: PASETO with challenges
        // - If the operation is a mutation:
        if let Some(mutation) = mutation {
            // - That the operation matches the mutation field and is one of publish, yank, or unyank.
            if message.mutation != Some(mutation.mutation) {
                dbg!(message.mutation);
                return false;
            }
            // - That the package, and version match the request.
            if message.name != mutation.name {
                dbg!(message.name);
                return false;
            }
            if message.vers != mutation.vers {
                dbg!(message.vers);
                return false;
            }
            // - If the mutation is publish, that the version has not already been published, and that the hash matches the request.
            if mutation.mutation == "publish" {
                if message.cksum != mutation.cksum {
                    dbg!(message.cksum);
                    return false;
                }
            }
        } else {
            // - If the operation is a read, that the mutation field is not set.
            if message.mutation.is_some()
                || message.name.is_some()
                || message.vers.is_some()
                || message.cksum.is_some()
            {
                return false;
            }
        }
        true
    }
+
+ /// Route the request
+ fn route(&self, req: &Request) -> Response {
+ // Check for custom responder
+ if let Some(responder) = self.custom_responders.get(req.url.path()) {
+ return responder(&req, self);
+ }
+ let path: Vec<_> = req.url.path()[1..].split('/').collect();
+ match (req.method.as_str(), path.as_slice()) {
+ ("get", ["index", ..]) => {
+ if !self.check_authorized(req, None) {
+ self.unauthorized(req)
+ } else {
+ self.index(&req)
+ }
+ }
+ ("get", ["dl", ..]) => {
+ if !self.check_authorized(req, None) {
+ self.unauthorized(req)
+ } else {
+ self.dl(&req)
+ }
+ }
+ // publish
+ ("put", ["api", "v1", "crates", "new"]) => self.check_authorized_publish(req),
+ // The remainder of the operators in the test framework do nothing other than responding 'ok'.
+ //
+ // Note: We don't need to support anything real here because there are no tests that
+ // currently require anything other than publishing via the http api.
+
+ // yank / unyank
+ ("delete" | "put", ["api", "v1", "crates", crate_name, version, mutation]) => {
+ if !self.check_authorized(
+ req,
+ Some(Mutation {
+ mutation,
+ name: Some(crate_name),
+ vers: Some(version),
+ cksum: None,
+ }),
+ ) {
+ self.unauthorized(req)
+ } else {
+ self.ok(&req)
+ }
+ }
+ // owners
+ ("get" | "put" | "delete", ["api", "v1", "crates", crate_name, "owners"]) => {
+ if !self.check_authorized(
+ req,
+ Some(Mutation {
+ mutation: "owners",
+ name: Some(crate_name),
+ vers: None,
+ cksum: None,
+ }),
+ ) {
+ self.unauthorized(req)
+ } else {
+ self.ok(&req)
+ }
+ }
+ _ => self.not_found(&req),
+ }
+ }
+
+ /// Unauthorized response
+ pub fn unauthorized(&self, _req: &Request) -> Response {
+ Response {
+ code: 401,
+ headers: vec![
+ r#"WWW-Authenticate: Cargo login_url="https://test-registry-login/me""#.to_string(),
+ ],
+ body: b"Unauthorized message from server.".to_vec(),
+ }
+ }
+
+ /// Not found response
+ pub fn not_found(&self, _req: &Request) -> Response {
+ Response {
+ code: 404,
+ headers: vec![],
+ body: b"not found".to_vec(),
+ }
+ }
+
+ /// Respond OK without doing anything
+ pub fn ok(&self, _req: &Request) -> Response {
+ Response {
+ code: 200,
+ headers: vec![],
+ body: br#"{"ok": true, "msg": "completed!"}"#.to_vec(),
+ }
+ }
+
+ /// Return an internal server error (HTTP 500)
+ pub fn internal_server_error(&self, _req: &Request) -> Response {
+ Response {
+ code: 500,
+ headers: vec![],
+ body: br#"internal server error"#.to_vec(),
+ }
+ }
+
+ /// Serve the download endpoint
+ pub fn dl(&self, req: &Request) -> Response {
+ let file = self
+ .dl_path
+ .join(req.url.path().strip_prefix("/dl/").unwrap());
+ println!("{}", file.display());
+ if !file.exists() {
+ return self.not_found(req);
+ }
+ return Response {
+ body: fs::read(&file).unwrap(),
+ code: 200,
+ headers: vec![],
+ };
+ }
+
+ /// Serve the registry index
+ pub fn index(&self, req: &Request) -> Response {
+ let file = self
+ .registry_path
+ .join(req.url.path().strip_prefix("/index/").unwrap());
+ if !file.exists() {
+ return self.not_found(req);
+ } else {
+ // Now grab info about the file.
+ let data = fs::read(&file).unwrap();
+ let etag = Sha256::new().update(&data).finish_hex();
+ let last_modified = format!("{:?}", file.metadata().unwrap().modified().unwrap());
+
+ // Start to construct our response:
+ let mut any_match = false;
+ let mut all_match = true;
+ if let Some(expected) = &req.if_none_match {
+ if &etag != expected {
+ all_match = false;
+ } else {
+ any_match = true;
+ }
+ }
+ if let Some(expected) = &req.if_modified_since {
+ // NOTE: Equality comparison is good enough for tests.
+ if &last_modified != expected {
+ all_match = false;
+ } else {
+ any_match = true;
+ }
+ }
+
+ if any_match && all_match {
+ return Response {
+ body: Vec::new(),
+ code: 304,
+ headers: vec![],
+ };
+ } else {
+ return Response {
+ body: data,
+ code: 200,
+ headers: vec![
+ format!("ETag: \"{}\"", etag),
+ format!("Last-Modified: {}", last_modified),
+ ],
+ };
+ }
+ }
+ }
+
    /// Handles `PUT /api/v1/crates/new`: validates the auth token, records
    /// the raw request body, extracts the `.crate` file, and appends the
    /// new version to the index.
    ///
    /// The body follows cargo's publish wire format: a little-endian u32
    /// JSON length, the JSON metadata, a little-endian u32 file length,
    /// then the `.crate` bytes.
    pub fn check_authorized_publish(&self, req: &Request) -> Response {
        if let Some(body) = &req.body {
            // Mimic the publish behavior for local registries by writing out the request
            // so tests can verify publishes made to either registry type.
            let path = self.api_path.join("api/v1/crates/new");
            t!(fs::create_dir_all(path.parent().unwrap()));
            t!(fs::write(&path, body));

            // Get the metadata of the package
            let (len, remaining) = body.split_at(4);
            let json_len = u32::from_le_bytes(len.try_into().unwrap());
            let (json, remaining) = remaining.split_at(json_len as usize);
            let new_crate = serde_json::from_slice::<crates_io::NewCrate>(json).unwrap();
            // Get the `.crate` file
            let (len, remaining) = remaining.split_at(4);
            let file_len = u32::from_le_bytes(len.try_into().unwrap());
            let (file, _remaining) = remaining.split_at(file_len as usize);
            let file_cksum = cksum(&file);

            // Authorization must attest to this exact publish (name,
            // version, and checksum of the uploaded file).
            if !self.check_authorized(
                req,
                Some(Mutation {
                    mutation: "publish",
                    name: Some(&new_crate.name),
                    vers: Some(&new_crate.vers),
                    cksum: Some(&file_cksum),
                }),
            ) {
                return self.unauthorized(req);
            }

            // Write the `.crate`
            let dst = self
                .dl_path
                .join(&new_crate.name)
                .join(&new_crate.vers)
                .join("download");
            t!(fs::create_dir_all(dst.parent().unwrap()));
            t!(fs::write(&dst, file));

            // Translate the publish metadata into index-entry dependency
            // records (index uses the original name + `package` rename).
            let deps = new_crate
                .deps
                .iter()
                .map(|dep| {
                    let (name, package) = match &dep.explicit_name_in_toml {
                        Some(explicit) => (explicit.to_string(), Some(dep.name.to_string())),
                        None => (dep.name.to_string(), None),
                    };
                    serde_json::json!({
                        "name": name,
                        "req": dep.version_req,
                        "features": dep.features,
                        "default_features": true,
                        "target": dep.target,
                        "optional": dep.optional,
                        "kind": dep.kind,
                        "registry": dep.registry,
                        "package": package,
                    })
                })
                .collect::<Vec<_>>();

            let line = create_index_line(
                serde_json::json!(new_crate.name),
                &new_crate.vers,
                deps,
                &file_cksum,
                new_crate.features,
                false,
                new_crate.links,
                None,
            );

            write_to_index(&self.registry_path, &new_crate.name, line, false);

            self.ok(&req)
        } else {
            Response {
                code: 400,
                headers: vec![],
                body: b"The request was missing a body".to_vec(),
            }
        }
    }
+}
+
+impl Package {
    /// Creates a new package builder.
    /// Call `publish()` to finalize and build the package.
    pub fn new(name: &str, vers: &str) -> Package {
        // Lazily set up the replacement registry the first time any test
        // package is created (detected via the missing `.cargo/config`).
        let config = paths::home().join(".cargo/config");
        if !config.exists() {
            init();
        }
        Package {
            name: name.to_string(),
            vers: vers.to_string(),
            deps: Vec::new(),
            files: Vec::new(),
            yanked: false,
            features: BTreeMap::new(),
            local: false,
            alternative: false,
            invalid_json: false,
            proc_macro: false,
            links: None,
            rust_version: None,
            cargo_features: Vec::new(),
            v: None,
        }
    }
+
+ /// Call with `true` to publish in a "local registry".
+ ///
+ /// See `source-replacement.html#local-registry-sources` for more details
+ /// on local registries. See `local_registry.rs` for the tests that use
+ /// this.
+ pub fn local(&mut self, local: bool) -> &mut Package {
+ self.local = local;
+ self
+ }
+
+ /// Call with `true` to publish in an "alternative registry".
+ ///
+ /// The name of the alternative registry is called "alternative".
+ ///
+ /// See `src/doc/src/reference/registries.md` for more details on
+ /// alternative registries. See `alt_registry.rs` for the tests that use
+ /// this.
+ pub fn alternative(&mut self, alternative: bool) -> &mut Package {
+ self.alternative = alternative;
+ self
+ }
+
+ /// Adds a file to the package.
+ pub fn file(&mut self, name: &str, contents: &str) -> &mut Package {
+ self.file_with_mode(name, DEFAULT_MODE, contents)
+ }
+
+ /// Adds a file with a specific Unix mode.
+ pub fn file_with_mode(&mut self, path: &str, mode: u32, contents: &str) -> &mut Package {
+ self.files.push(PackageFile {
+ path: path.to_string(),
+ contents: EntryData::Regular(contents.into()),
+ mode,
+ extra: false,
+ });
+ self
+ }
+
+ /// Adds a symlink to a path to the package.
+ pub fn symlink(&mut self, dst: &str, src: &str) -> &mut Package {
+ self.files.push(PackageFile {
+ path: dst.to_string(),
+ contents: EntryData::Symlink(src.into()),
+ mode: DEFAULT_MODE,
+ extra: false,
+ });
+ self
+ }
+
+ /// Adds an "extra" file that is not rooted within the package.
+ ///
+ /// Normal files are automatically placed within a directory named
+ /// `$PACKAGE-$VERSION`. This allows you to override that behavior,
+ /// typically for testing invalid behavior.
+ pub fn extra_file(&mut self, path: &str, contents: &str) -> &mut Package {
+ self.files.push(PackageFile {
+ path: path.to_string(),
+ contents: EntryData::Regular(contents.to_string()),
+ mode: DEFAULT_MODE,
+ extra: true,
+ });
+ self
+ }
+
+ /// Adds a normal dependency. Example:
+ /// ```
+ /// [dependencies]
+ /// foo = {version = "1.0"}
+ /// ```
+ pub fn dep(&mut self, name: &str, vers: &str) -> &mut Package {
+ self.add_dep(&Dependency::new(name, vers))
+ }
+
+ /// Adds a dependency with the given feature. Example:
+ /// ```
+ /// [dependencies]
+ /// foo = {version = "1.0", "features": ["feat1", "feat2"]}
+ /// ```
+ pub fn feature_dep(&mut self, name: &str, vers: &str, features: &[&str]) -> &mut Package {
+ self.add_dep(Dependency::new(name, vers).enable_features(features))
+ }
+
+ /// Adds a platform-specific dependency. Example:
+ /// ```
+ /// [target.'cfg(windows)'.dependencies]
+ /// foo = {version = "1.0"}
+ /// ```
+ pub fn target_dep(&mut self, name: &str, vers: &str, target: &str) -> &mut Package {
+ self.add_dep(Dependency::new(name, vers).target(target))
+ }
+
+ /// Adds a dependency to the alternative registry.
+ pub fn registry_dep(&mut self, name: &str, vers: &str) -> &mut Package {
+ self.add_dep(Dependency::new(name, vers).registry("alternative"))
+ }
+
+ /// Adds a dev-dependency. Example:
+ /// ```
+ /// [dev-dependencies]
+ /// foo = {version = "1.0"}
+ /// ```
+ pub fn dev_dep(&mut self, name: &str, vers: &str) -> &mut Package {
+ self.add_dep(Dependency::new(name, vers).dev())
+ }
+
+ /// Adds a build-dependency. Example:
+ /// ```
+ /// [build-dependencies]
+ /// foo = {version = "1.0"}
+ /// ```
+ pub fn build_dep(&mut self, name: &str, vers: &str) -> &mut Package {
+ self.add_dep(Dependency::new(name, vers).build())
+ }
+
+ pub fn add_dep(&mut self, dep: &Dependency) -> &mut Package {
+ self.deps.push(dep.clone());
+ self
+ }
+
+ /// Specifies whether or not the package is "yanked".
+ pub fn yanked(&mut self, yanked: bool) -> &mut Package {
+ self.yanked = yanked;
+ self
+ }
+
+ /// Specifies whether or not this is a proc macro.
+ pub fn proc_macro(&mut self, proc_macro: bool) -> &mut Package {
+ self.proc_macro = proc_macro;
+ self
+ }
+
+ /// Adds an entry in the `[features]` section.
+ pub fn feature(&mut self, name: &str, deps: &[&str]) -> &mut Package {
+ let deps = deps.iter().map(|s| s.to_string()).collect();
+ self.features.insert(name.to_string(), deps);
+ self
+ }
+
+ /// Specify a minimal Rust version.
+ pub fn rust_version(&mut self, rust_version: &str) -> &mut Package {
+ self.rust_version = Some(rust_version.into());
+ self
+ }
+
+ /// Causes the JSON line emitted in the index to be invalid, presumably
+ /// causing Cargo to skip over this version.
+ pub fn invalid_json(&mut self, invalid: bool) -> &mut Package {
+ self.invalid_json = invalid;
+ self
+ }
+
+ pub fn links(&mut self, links: &str) -> &mut Package {
+ self.links = Some(links.to_string());
+ self
+ }
+
+ pub fn cargo_feature(&mut self, feature: &str) -> &mut Package {
+ self.cargo_features.push(feature.to_owned());
+ self
+ }
+
+ /// Sets the index schema version for this package.
+ ///
+ /// See `cargo::sources::registry::RegistryPackage` for more information.
+ pub fn schema_version(&mut self, version: u32) -> &mut Package {
+ self.v = Some(version);
+ self
+ }
+
    /// Creates the package and place it in the registry.
    ///
    /// This does not actually use Cargo's publishing system, but instead
    /// manually creates the entry in the registry on the filesystem.
    ///
    /// Returns the checksum for the package.
    pub fn publish(&self) -> String {
        self.make_archive();

        // Figure out what we're going to write into the index.
        let deps = self
            .deps
            .iter()
            .map(|dep| {
                // In the index, the `registry` is null if it is from the same registry.
                // In Cargo.toml, it is None if it is from crates.io.
                let registry_url = match (self.alternative, dep.registry.as_deref()) {
                    (false, None) => None,
                    (false, Some("alternative")) => Some(alt_registry_url().to_string()),
                    (true, None) => {
                        Some("https://github.com/rust-lang/crates.io-index".to_string())
                    }
                    (true, Some("alternative")) => None,
                    _ => panic!("registry_dep currently only supports `alternative`"),
                };
                serde_json::json!({
                    "name": dep.name,
                    "req": dep.vers,
                    "features": dep.features,
                    "default_features": true,
                    "target": dep.target,
                    "artifact": dep.artifact,
                    "optional": dep.optional,
                    "kind": dep.kind,
                    "registry": registry_url,
                    "package": dep.package,
                })
            })
            .collect::<Vec<_>>();
        // Checksum of the `.crate` archive just written by `make_archive`.
        let cksum = {
            let c = t!(fs::read(&self.archive_dst()));
            cksum(&c)
        };
        // A non-string `name` makes the index line unparseable on purpose.
        let name = if self.invalid_json {
            serde_json::json!(1)
        } else {
            serde_json::json!(self.name)
        };
        let line = create_index_line(
            name,
            &self.vers,
            deps,
            &cksum,
            self.features.clone(),
            self.yanked,
            self.links.clone(),
            self.v,
        );

        let registry_path = if self.alternative {
            alt_registry_path()
        } else {
            registry_path()
        };

        write_to_index(&registry_path, &self.name, line, self.local);

        cksum
    }
+
    /// Builds the gzip-compressed `.crate` tar archive for this package.
    fn make_archive(&self) {
        let dst = self.archive_dst();
        t!(fs::create_dir_all(dst.parent().unwrap()));
        let f = t!(File::create(&dst));
        let mut a = Builder::new(GzEncoder::new(f, Compression::default()));

        // Only generate a manifest if the test didn't provide its own.
        if !self
            .files
            .iter()
            .any(|PackageFile { path, .. }| path == "Cargo.toml")
        {
            self.append_manifest(&mut a);
        }
        if self.files.is_empty() {
            // A package needs at least one target; default to an empty lib.
            self.append(
                &mut a,
                "src/lib.rs",
                DEFAULT_MODE,
                &EntryData::Regular("".into()),
            );
        } else {
            for PackageFile {
                path,
                contents,
                mode,
                extra,
            } in &self.files
            {
                if *extra {
                    // "extra" files land at the tar root (invalid-package tests).
                    self.append_raw(&mut a, path, *mode, contents);
                } else {
                    self.append(&mut a, path, *mode, contents);
                }
            }
        }
    }
+
+ fn append_manifest<W: Write>(&self, ar: &mut Builder<W>) {
+ let mut manifest = String::new();
+
+ if !self.cargo_features.is_empty() {
+ let mut features = String::new();
+ serde::Serialize::serialize(
+ &self.cargo_features,
+ toml::ser::ValueSerializer::new(&mut features),
+ )
+ .unwrap();
+ manifest.push_str(&format!("cargo-features = {}\n\n", features));
+ }
+
+ manifest.push_str(&format!(
+ r#"
+ [package]
+ name = "{}"
+ version = "{}"
+ authors = []
+ "#,
+ self.name, self.vers
+ ));
+
+ if let Some(version) = &self.rust_version {
+ manifest.push_str(&format!("rust-version = \"{}\"", version));
+ }
+
+ for dep in self.deps.iter() {
+ let target = match dep.target {
+ None => String::new(),
+ Some(ref s) => format!("target.'{}'.", s),
+ };
+ let kind = match &dep.kind[..] {
+ "build" => "build-",
+ "dev" => "dev-",
+ _ => "",
+ };
+ manifest.push_str(&format!(
+ r#"
+ [{}{}dependencies.{}]
+ version = "{}"
+ "#,
+ target, kind, dep.name, dep.vers
+ ));
+ if let Some((artifact, target)) = &dep.artifact {
+ manifest.push_str(&format!("artifact = \"{}\"\n", artifact));
+ if let Some(target) = &target {
+ manifest.push_str(&format!("target = \"{}\"\n", target))
+ }
+ }
+ if let Some(registry) = &dep.registry {
+ assert_eq!(registry, "alternative");
+ manifest.push_str(&format!("registry-index = \"{}\"", alt_registry_url()));
+ }
+ }
+ if self.proc_macro {
+ manifest.push_str("[lib]\nproc-macro = true\n");
+ }
+
+ self.append(
+ ar,
+ "Cargo.toml",
+ DEFAULT_MODE,
+ &EntryData::Regular(manifest.into()),
+ );
+ }
+
+ fn append<W: Write>(&self, ar: &mut Builder<W>, file: &str, mode: u32, contents: &EntryData) {
+ self.append_raw(
+ ar,
+ &format!("{}-{}/{}", self.name, self.vers, file),
+ mode,
+ contents,
+ );
+ }
+
    /// Appends a single entry to the tar archive at the exact `path`
    /// given (no package prefix).
    fn append_raw<W: Write>(
        &self,
        ar: &mut Builder<W>,
        path: &str,
        mode: u32,
        contents: &EntryData,
    ) {
        let mut header = Header::new_ustar();
        let contents = match contents {
            EntryData::Regular(contents) => contents.as_str(),
            EntryData::Symlink(src) => {
                header.set_entry_type(tar::EntryType::Symlink);
                t!(header.set_link_name(src));
                "" // Symlink has no contents.
            }
        };
        header.set_size(contents.len() as u64);
        t!(header.set_path(path));
        header.set_mode(mode);
        // Checksum is computed over the header, so set it last.
        header.set_cksum();
        t!(ar.append(&header, contents.as_bytes()));
    }
+
+ /// Returns the path to the compressed package file.
+ pub fn archive_dst(&self) -> PathBuf {
+ if self.local {
+ registry_path().join(format!("{}-{}.crate", self.name, self.vers))
+ } else if self.alternative {
+ alt_dl_path()
+ .join(&self.name)
+ .join(&self.vers)
+ .join("download")
+ } else {
+ dl_path().join(&self.name).join(&self.vers).join("download")
+ }
+ }
+}
+
+pub fn cksum(s: &[u8]) -> String {
+ Sha256::new().update(s).finish_hex()
+}
+
+impl Dependency {
    /// Creates a plain `[dependencies]` entry on `name` with requirement `vers`.
    pub fn new(name: &str, vers: &str) -> Dependency {
        Dependency {
            name: name.to_string(),
            vers: vers.to_string(),
            // "normal" = plain `[dependencies]` (vs "build"/"dev").
            kind: "normal".to_string(),
            artifact: None,
            target: None,
            features: Vec::new(),
            package: None,
            optional: false,
            registry: None,
        }
    }
+
+ /// Changes this to `[build-dependencies]`.
+ pub fn build(&mut self) -> &mut Self {
+ self.kind = "build".to_string();
+ self
+ }
+
+ /// Changes this to `[dev-dependencies]`.
+ pub fn dev(&mut self) -> &mut Self {
+ self.kind = "dev".to_string();
+ self
+ }
+
+ /// Changes this to `[target.$target.dependencies]`.
+ pub fn target(&mut self, target: &str) -> &mut Self {
+ self.target = Some(target.to_string());
+ self
+ }
+
+ /// Change the artifact to be of the given kind, like "bin", or "staticlib",
+ /// along with a specific target triple if provided.
+ pub fn artifact(&mut self, kind: &str, target: Option<String>) -> &mut Self {
+ self.artifact = Some((kind.to_string(), target));
+ self
+ }
+
+ /// Adds `registry = $registry` to this dependency.
+ pub fn registry(&mut self, registry: &str) -> &mut Self {
+ self.registry = Some(registry.to_string());
+ self
+ }
+
+ /// Adds `features = [ ... ]` to this dependency.
+ pub fn enable_features(&mut self, features: &[&str]) -> &mut Self {
+ self.features.extend(features.iter().map(|s| s.to_string()));
+ self
+ }
+
+ /// Adds `package = ...` to this dependency.
+ pub fn package(&mut self, pkg: &str) -> &mut Self {
+ self.package = Some(pkg.to_string());
+ self
+ }
+
+ /// Changes this to an optional dependency.
+ pub fn optional(&mut self, optional: bool) -> &mut Self {
+ self.optional = optional;
+ self
+ }
+}
diff --git a/crates/cargo-test-support/src/tools.rs b/crates/cargo-test-support/src/tools.rs
new file mode 100644
index 0000000..7c056b6
--- /dev/null
+++ b/crates/cargo-test-support/src/tools.rs
@@ -0,0 +1,108 @@
+//! Common executables that can be reused by various tests.
+
+use crate::{basic_manifest, paths, project, Project};
+use lazy_static::lazy_static;
+use std::path::{Path, PathBuf};
+use std::sync::Mutex;
+
+lazy_static! {
+ static ref ECHO_WRAPPER: Mutex<Option<PathBuf>> = Mutex::new(None);
+ static ref ECHO: Mutex<Option<PathBuf>> = Mutex::new(None);
+}
+
+/// Returns the path to an executable that works as a wrapper around rustc.
+///
+/// The wrapper will echo the command line it was called with to stderr.
+pub fn echo_wrapper() -> PathBuf {
+ let mut lock = ECHO_WRAPPER.lock().unwrap();
+ if let Some(path) = &*lock {
+ return path.clone();
+ }
+ let p = project()
+ .at(paths::global_root().join("rustc-echo-wrapper"))
+ .file("Cargo.toml", &basic_manifest("rustc-echo-wrapper", "1.0.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ use std::fs::read_to_string;
+ use std::path::PathBuf;
+ fn main() {
+ // Handle args from `@path` argfile for rustc
+ let args = std::env::args()
+ .flat_map(|p| if let Some(p) = p.strip_prefix("@") {
+ read_to_string(p).unwrap().lines().map(String::from).collect()
+ } else {
+ vec![p]
+ })
+ .collect::<Vec<_>>();
+ eprintln!("WRAPPER CALLED: {}", args[1..].join(" "));
+ let status = std::process::Command::new(&args[1])
+ .args(&args[2..]).status().unwrap();
+ std::process::exit(status.code().unwrap_or(1));
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build").run();
+ let path = p.bin("rustc-echo-wrapper");
+ *lock = Some(path.clone());
+ path
+}
+
+/// Returns the path to an executable that prints its arguments.
+///
+/// Do not expect this to be anything fancy.
+pub fn echo() -> PathBuf {
+ let mut lock = ECHO.lock().unwrap();
+ if let Some(path) = &*lock {
+ return path.clone();
+ }
+ if let Ok(path) = cargo_util::paths::resolve_executable(Path::new("echo")) {
+ *lock = Some(path.clone());
+ return path;
+ }
+ // Often on Windows, `echo` is not available.
+ let p = project()
+ .at(paths::global_root().join("basic-echo"))
+ .file("Cargo.toml", &basic_manifest("basic-echo", "1.0.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let mut s = String::new();
+ let mut it = std::env::args().skip(1).peekable();
+ while let Some(n) = it.next() {
+ s.push_str(&n);
+ if it.peek().is_some() {
+ s.push(' ');
+ }
+ }
+ println!("{}", s);
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build").run();
+ let path = p.bin("basic-echo");
+ *lock = Some(path.clone());
+ path
+}
+
+/// Returns a project which builds a cargo-echo simple subcommand
+pub fn echo_subcommand() -> Project {
+ let p = project()
+ .at("cargo-echo")
+ .file("Cargo.toml", &basic_manifest("cargo-echo", "0.0.1"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ let args: Vec<_> = ::std::env::args().skip(1).collect();
+ println!("{}", args.join(" "));
+ }
+ "#,
+ )
+ .build();
+ p.cargo("build").run();
+ p
+}
diff --git a/crates/cargo-util/Cargo.toml b/crates/cargo-util/Cargo.toml
new file mode 100644
index 0000000..aa25c13
--- /dev/null
+++ b/crates/cargo-util/Cargo.toml
@@ -0,0 +1,28 @@
+[package]
+name = "cargo-util"
+version = "0.2.3"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+homepage = "https://github.com/rust-lang/cargo"
+repository = "https://github.com/rust-lang/cargo"
+description = "Miscellaneous support code used by Cargo."
+
+[dependencies]
+anyhow = "1.0.34"
+crypto-hash = "0.3.1"
+filetime = "0.2.9"
+hex = "0.4.2"
+jobserver = "0.1.26"
+libc = "0.2.88"
+log = "0.4.6"
+same-file = "1.0.6"
+shell-escape = "0.1.4"
+tempfile = "3.1.0"
+walkdir = "2.3.1"
+
+[target.'cfg(target_os = "macos")'.dependencies]
+core-foundation = { version = "0.9.0", features = ["mac_os_10_7_support"] }
+
+[target.'cfg(windows)'.dependencies]
+miow = "0.5.0"
+windows-sys = { version = "0.45.0", features = ["Win32_Storage_FileSystem", "Win32_Foundation", "Win32_System_Console"] }
diff --git a/crates/cargo-util/LICENSE-APACHE b/crates/cargo-util/LICENSE-APACHE
new file mode 120000
index 0000000..1cd601d
--- /dev/null
+++ b/crates/cargo-util/LICENSE-APACHE
@@ -0,0 +1 @@
+../../LICENSE-APACHE \ No newline at end of file
diff --git a/crates/cargo-util/LICENSE-MIT b/crates/cargo-util/LICENSE-MIT
new file mode 120000
index 0000000..b2cfbdc
--- /dev/null
+++ b/crates/cargo-util/LICENSE-MIT
@@ -0,0 +1 @@
+../../LICENSE-MIT \ No newline at end of file
diff --git a/crates/cargo-util/src/lib.rs b/crates/cargo-util/src/lib.rs
new file mode 100644
index 0000000..0cbc920
--- /dev/null
+++ b/crates/cargo-util/src/lib.rs
@@ -0,0 +1,18 @@
+//! Miscellaneous support code used by Cargo.
+
+pub use self::read2::read2;
+pub use process_builder::ProcessBuilder;
+pub use process_error::{exit_status_to_string, is_simple_exit_code, ProcessError};
+pub use sha256::Sha256;
+
+pub mod paths;
+mod process_builder;
+mod process_error;
+mod read2;
+pub mod registry;
+mod sha256;
+
+/// Whether or not this is running in a Continuous Integration environment.
+pub fn is_ci() -> bool {
+ std::env::var("CI").is_ok() || std::env::var("TF_BUILD").is_ok()
+}
diff --git a/crates/cargo-util/src/paths.rs b/crates/cargo-util/src/paths.rs
new file mode 100644
index 0000000..69df7a2
--- /dev/null
+++ b/crates/cargo-util/src/paths.rs
@@ -0,0 +1,788 @@
+//! Various utilities for working with files and paths.
+
+use anyhow::{Context, Result};
+use filetime::FileTime;
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fs::{self, File, OpenOptions};
+use std::io;
+use std::io::prelude::*;
+use std::iter;
+use std::path::{Component, Path, PathBuf};
+use tempfile::Builder as TempFileBuilder;
+
+/// Joins paths into a string suitable for the `PATH` environment variable.
+///
+/// This is equivalent to [`std::env::join_paths`], but includes a more
+/// detailed error message. The given `env` argument is the name of the
+/// environment variable this will be used for, which is included in the
+/// error message.
+pub fn join_paths<T: AsRef<OsStr>>(paths: &[T], env: &str) -> Result<OsString> {
+ env::join_paths(paths.iter()).with_context(|| {
+ let mut message = format!(
+ "failed to join paths from `${env}` together\n\n\
+ Check if any of path segments listed below contain an \
+ unterminated quote character or path separator:"
+ );
+ for path in paths {
+ use std::fmt::Write;
+ write!(&mut message, "\n {:?}", Path::new(path)).unwrap();
+ }
+
+ message
+ })
+}
+
+/// Returns the name of the environment variable used for searching for
+/// dynamic libraries.
+pub fn dylib_path_envvar() -> &'static str {
+ if cfg!(windows) {
+ "PATH"
+ } else if cfg!(target_os = "macos") {
+ // When loading and linking a dynamic library or bundle, dlopen
+ // searches in LD_LIBRARY_PATH, DYLD_LIBRARY_PATH, PWD, and
+ // DYLD_FALLBACK_LIBRARY_PATH.
+ // In the Mach-O format, a dynamic library has an "install path."
+ // Clients linking against the library record this path, and the
+ // dynamic linker, dyld, uses it to locate the library.
+ // dyld searches DYLD_LIBRARY_PATH *before* the install path.
+ // dyld searches DYLD_FALLBACK_LIBRARY_PATH only if it cannot
+ // find the library in the install path.
+ // Setting DYLD_LIBRARY_PATH can easily have unintended
+ // consequences.
+ //
+ // Also, DYLD_LIBRARY_PATH appears to have significant performance
+ // penalty starting in 10.13. Cargo's testsuite ran more than twice as
+ // slow with it on CI.
+ "DYLD_FALLBACK_LIBRARY_PATH"
+ } else {
+ "LD_LIBRARY_PATH"
+ }
+}
+
+/// Returns a list of directories that are searched for dynamic libraries.
+///
+/// Note that some operating systems will have defaults if this is empty that
+/// will need to be dealt with.
+pub fn dylib_path() -> Vec<PathBuf> {
+ match env::var_os(dylib_path_envvar()) {
+ Some(var) => env::split_paths(&var).collect(),
+ None => Vec::new(),
+ }
+}
+
+/// Normalize a path, removing things like `.` and `..`.
+///
+/// CAUTION: This does not resolve symlinks (unlike
+/// [`std::fs::canonicalize`]). This may cause incorrect or surprising
+/// behavior at times. This should be used carefully. Unfortunately,
+/// [`std::fs::canonicalize`] can be hard to use correctly, since it can often
+/// fail, or on Windows returns annoying device paths. This is a problem Cargo
+/// needs to improve on.
+pub fn normalize_path(path: &Path) -> PathBuf {
+ let mut components = path.components().peekable();
+ let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
+ components.next();
+ PathBuf::from(c.as_os_str())
+ } else {
+ PathBuf::new()
+ };
+
+ for component in components {
+ match component {
+ Component::Prefix(..) => unreachable!(),
+ Component::RootDir => {
+ ret.push(component.as_os_str());
+ }
+ Component::CurDir => {}
+ Component::ParentDir => {
+ ret.pop();
+ }
+ Component::Normal(c) => {
+ ret.push(c);
+ }
+ }
+ }
+ ret
+}
+
+/// Returns the absolute path of where the given executable is located based
+/// on searching the `PATH` environment variable.
+///
+/// Returns an error if it cannot be found.
+pub fn resolve_executable(exec: &Path) -> Result<PathBuf> {
+ if exec.components().count() == 1 {
+ let paths = env::var_os("PATH").ok_or_else(|| anyhow::format_err!("no PATH"))?;
+ let candidates = env::split_paths(&paths).flat_map(|path| {
+ let candidate = path.join(&exec);
+ let with_exe = if env::consts::EXE_EXTENSION.is_empty() {
+ None
+ } else {
+ Some(candidate.with_extension(env::consts::EXE_EXTENSION))
+ };
+ iter::once(candidate).chain(with_exe)
+ });
+ for candidate in candidates {
+ if candidate.is_file() {
+ return Ok(candidate);
+ }
+ }
+
+ anyhow::bail!("no executable for `{}` found in PATH", exec.display())
+ } else {
+ Ok(exec.into())
+ }
+}
+
+/// Reads a file to a string.
+///
+/// Equivalent to [`std::fs::read_to_string`] with better error messages.
+pub fn read(path: &Path) -> Result<String> {
+ match String::from_utf8(read_bytes(path)?) {
+ Ok(s) => Ok(s),
+ Err(_) => anyhow::bail!("path at `{}` was not valid utf-8", path.display()),
+ }
+}
+
+/// Reads a file into a bytes vector.
+///
+/// Equivalent to [`std::fs::read`] with better error messages.
+pub fn read_bytes(path: &Path) -> Result<Vec<u8>> {
+ fs::read(path).with_context(|| format!("failed to read `{}`", path.display()))
+}
+
+/// Writes a file to disk.
+///
+/// Equivalent to [`std::fs::write`] with better error messages.
+pub fn write<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> {
+ let path = path.as_ref();
+ fs::write(path, contents.as_ref())
+ .with_context(|| format!("failed to write `{}`", path.display()))
+}
+
+/// Equivalent to [`write()`], but does not write anything if the file contents
+/// are identical to the given contents.
+pub fn write_if_changed<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> {
+ (|| -> Result<()> {
+ let contents = contents.as_ref();
+ let mut f = OpenOptions::new()
+ .read(true)
+ .write(true)
+ .create(true)
+ .open(&path)?;
+ let mut orig = Vec::new();
+ f.read_to_end(&mut orig)?;
+ if orig != contents {
+ f.set_len(0)?;
+ f.seek(io::SeekFrom::Start(0))?;
+ f.write_all(contents)?;
+ }
+ Ok(())
+ })()
+ .with_context(|| format!("failed to write `{}`", path.as_ref().display()))?;
+ Ok(())
+}
+
+/// Equivalent to [`write()`], but appends to the end instead of replacing the
+/// contents.
+pub fn append(path: &Path, contents: &[u8]) -> Result<()> {
+ (|| -> Result<()> {
+ let mut f = OpenOptions::new()
+ .write(true)
+ .append(true)
+ .create(true)
+ .open(path)?;
+
+ f.write_all(contents)?;
+ Ok(())
+ })()
+ .with_context(|| format!("failed to write `{}`", path.display()))?;
+ Ok(())
+}
+
+/// Creates a new file.
+pub fn create<P: AsRef<Path>>(path: P) -> Result<File> {
+ let path = path.as_ref();
+ File::create(path).with_context(|| format!("failed to create file `{}`", path.display()))
+}
+
+/// Opens an existing file.
+pub fn open<P: AsRef<Path>>(path: P) -> Result<File> {
+ let path = path.as_ref();
+ File::open(path).with_context(|| format!("failed to open file `{}`", path.display()))
+}
+
+/// Returns the last modification time of a file.
+pub fn mtime(path: &Path) -> Result<FileTime> {
+ let meta =
+ fs::metadata(path).with_context(|| format!("failed to stat `{}`", path.display()))?;
+ Ok(FileTime::from_last_modification_time(&meta))
+}
+
+/// Returns the maximum mtime of the given path, recursing into
+/// subdirectories, and following symlinks.
+pub fn mtime_recursive(path: &Path) -> Result<FileTime> {
+ let meta =
+ fs::metadata(path).with_context(|| format!("failed to stat `{}`", path.display()))?;
+ if !meta.is_dir() {
+ return Ok(FileTime::from_last_modification_time(&meta));
+ }
+ let max_meta = walkdir::WalkDir::new(path)
+ .follow_links(true)
+ .into_iter()
+ .filter_map(|e| match e {
+ Ok(e) => Some(e),
+ Err(e) => {
+ // Ignore errors while walking. If Cargo can't access it, the
+ // build script probably can't access it, either.
+ log::debug!("failed to determine mtime while walking directory: {}", e);
+ None
+ }
+ })
+ .filter_map(|e| {
+ if e.path_is_symlink() {
+ // Use the mtime of both the symlink and its target, to
+ // handle the case where the symlink is modified to a
+ // different target.
+ let sym_meta = match std::fs::symlink_metadata(e.path()) {
+ Ok(m) => m,
+ Err(err) => {
+ // I'm not sure when this is really possible (maybe a
+ // race with unlinking?). Regardless, if Cargo can't
+ // read it, the build script probably can't either.
+ log::debug!(
+ "failed to determine mtime while fetching symlink metadata of {}: {}",
+ e.path().display(),
+ err
+ );
+ return None;
+ }
+ };
+ let sym_mtime = FileTime::from_last_modification_time(&sym_meta);
+ // Walkdir follows symlinks.
+ match e.metadata() {
+ Ok(target_meta) => {
+ let target_mtime = FileTime::from_last_modification_time(&target_meta);
+ Some(sym_mtime.max(target_mtime))
+ }
+ Err(err) => {
+ // Can't access the symlink target. If Cargo can't
+ // access it, the build script probably can't access
+ // it either.
+ log::debug!(
+ "failed to determine mtime of symlink target for {}: {}",
+ e.path().display(),
+ err
+ );
+ Some(sym_mtime)
+ }
+ }
+ } else {
+ let meta = match e.metadata() {
+ Ok(m) => m,
+ Err(err) => {
+ // I'm not sure when this is really possible (maybe a
+ // race with unlinking?). Regardless, if Cargo can't
+ // read it, the build script probably can't either.
+ log::debug!(
+ "failed to determine mtime while fetching metadata of {}: {}",
+ e.path().display(),
+ err
+ );
+ return None;
+ }
+ };
+ Some(FileTime::from_last_modification_time(&meta))
+ }
+ })
+ .max()
+ // or_else handles the case where there are no files in the directory.
+ .unwrap_or_else(|| FileTime::from_last_modification_time(&meta));
+ Ok(max_meta)
+}
+
+/// Record the current time on the filesystem (using the filesystem's clock)
+/// using a file at the given directory. Returns the current time.
+pub fn set_invocation_time(path: &Path) -> Result<FileTime> {
+ // note that if `FileTime::from_system_time(SystemTime::now());` is determined to be sufficient,
+ // then this can be removed.
+ let timestamp = path.join("invoked.timestamp");
+ write(
+ &timestamp,
+ "This file has an mtime of when this was started.",
+ )?;
+ let ft = mtime(&timestamp)?;
+ log::debug!("invocation time for {:?} is {}", path, ft);
+ Ok(ft)
+}
+
+/// Converts a path to UTF-8 bytes.
+pub fn path2bytes(path: &Path) -> Result<&[u8]> {
+ #[cfg(unix)]
+ {
+ use std::os::unix::prelude::*;
+ Ok(path.as_os_str().as_bytes())
+ }
+ #[cfg(windows)]
+ {
+ match path.as_os_str().to_str() {
+ Some(s) => Ok(s.as_bytes()),
+ None => Err(anyhow::format_err!(
+ "invalid non-unicode path: {}",
+ path.display()
+ )),
+ }
+ }
+}
+
+/// Converts UTF-8 bytes to a path.
+pub fn bytes2path(bytes: &[u8]) -> Result<PathBuf> {
+ #[cfg(unix)]
+ {
+ use std::os::unix::prelude::*;
+ Ok(PathBuf::from(OsStr::from_bytes(bytes)))
+ }
+ #[cfg(windows)]
+ {
+ use std::str;
+ match str::from_utf8(bytes) {
+ Ok(s) => Ok(PathBuf::from(s)),
+ Err(..) => Err(anyhow::format_err!("invalid non-unicode path")),
+ }
+ }
+}
+
+/// Returns an iterator that walks up the directory hierarchy towards the root.
+///
+/// Each item is a [`Path`]. It will start with the given path, finishing at
+/// the root. If the `stop_root_at` parameter is given, it will stop at the
+/// given path (which will be the last item).
+pub fn ancestors<'a>(path: &'a Path, stop_root_at: Option<&Path>) -> PathAncestors<'a> {
+ PathAncestors::new(path, stop_root_at)
+}
+
+pub struct PathAncestors<'a> {
+ current: Option<&'a Path>,
+ stop_at: Option<PathBuf>,
+}
+
+impl<'a> PathAncestors<'a> {
+ fn new(path: &'a Path, stop_root_at: Option<&Path>) -> PathAncestors<'a> {
+ let stop_at = env::var("__CARGO_TEST_ROOT")
+ .ok()
+ .map(PathBuf::from)
+ .or_else(|| stop_root_at.map(|p| p.to_path_buf()));
+ PathAncestors {
+ current: Some(path),
+ //HACK: avoid reading `~/.cargo/config` when testing Cargo itself.
+ stop_at,
+ }
+ }
+}
+
+impl<'a> Iterator for PathAncestors<'a> {
+ type Item = &'a Path;
+
+ fn next(&mut self) -> Option<&'a Path> {
+ if let Some(path) = self.current {
+ self.current = path.parent();
+
+ if let Some(ref stop_at) = self.stop_at {
+ if path == stop_at {
+ self.current = None;
+ }
+ }
+
+ Some(path)
+ } else {
+ None
+ }
+ }
+}
+
+/// Equivalent to [`std::fs::create_dir_all`] with better error messages.
+pub fn create_dir_all(p: impl AsRef<Path>) -> Result<()> {
+ _create_dir_all(p.as_ref())
+}
+
+fn _create_dir_all(p: &Path) -> Result<()> {
+ fs::create_dir_all(p)
+ .with_context(|| format!("failed to create directory `{}`", p.display()))?;
+ Ok(())
+}
+
+/// Recursively remove all files and directories at the given directory.
+///
+/// This does *not* follow symlinks.
+pub fn remove_dir_all<P: AsRef<Path>>(p: P) -> Result<()> {
+ _remove_dir_all(p.as_ref())
+}
+
+fn _remove_dir_all(p: &Path) -> Result<()> {
+ if p.symlink_metadata()
+ .with_context(|| format!("could not get metadata for `{}` to remove", p.display()))?
+ .is_symlink()
+ {
+ return remove_file(p);
+ }
+ let entries = p
+ .read_dir()
+ .with_context(|| format!("failed to read directory `{}`", p.display()))?;
+ for entry in entries {
+ let entry = entry?;
+ let path = entry.path();
+ if entry.file_type()?.is_dir() {
+ remove_dir_all(&path)?;
+ } else {
+ remove_file(&path)?;
+ }
+ }
+ remove_dir(&p)
+}
+
+/// Equivalent to [`std::fs::remove_dir`] with better error messages.
+pub fn remove_dir<P: AsRef<Path>>(p: P) -> Result<()> {
+ _remove_dir(p.as_ref())
+}
+
+fn _remove_dir(p: &Path) -> Result<()> {
+ fs::remove_dir(p).with_context(|| format!("failed to remove directory `{}`", p.display()))?;
+ Ok(())
+}
+
+/// Equivalent to [`std::fs::remove_file`] with better error messages.
+///
+/// If the file is readonly, this will attempt to change the permissions to
+/// force the file to be deleted.
+pub fn remove_file<P: AsRef<Path>>(p: P) -> Result<()> {
+ _remove_file(p.as_ref())
+}
+
+fn _remove_file(p: &Path) -> Result<()> {
+ let mut err = match fs::remove_file(p) {
+ Ok(()) => return Ok(()),
+ Err(e) => e,
+ };
+
+ if err.kind() == io::ErrorKind::PermissionDenied && set_not_readonly(p).unwrap_or(false) {
+ match fs::remove_file(p) {
+ Ok(()) => return Ok(()),
+ Err(e) => err = e,
+ }
+ }
+
+ Err(err).with_context(|| format!("failed to remove file `{}`", p.display()))?;
+ Ok(())
+}
+
+fn set_not_readonly(p: &Path) -> io::Result<bool> {
+ let mut perms = p.metadata()?.permissions();
+ if !perms.readonly() {
+ return Ok(false);
+ }
+ perms.set_readonly(false);
+ fs::set_permissions(p, perms)?;
+ Ok(true)
+}
+
+/// Hardlink (file) or symlink (dir) src to dst if possible, otherwise copy it.
+///
+/// If the destination already exists, it is removed before linking.
+pub fn link_or_copy(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()> {
+ let src = src.as_ref();
+ let dst = dst.as_ref();
+ _link_or_copy(src, dst)
+}
+
+fn _link_or_copy(src: &Path, dst: &Path) -> Result<()> {
+ log::debug!("linking {} to {}", src.display(), dst.display());
+ if same_file::is_same_file(src, dst).unwrap_or(false) {
+ return Ok(());
+ }
+
+ // NB: we can't use dst.exists(), as if dst is a broken symlink,
+ // dst.exists() will return false. This is problematic, as we still need to
+ // unlink dst in this case. symlink_metadata(dst).is_ok() will tell us
+ // whether dst exists *without* following symlinks, which is what we want.
+ if fs::symlink_metadata(dst).is_ok() {
+ remove_file(&dst)?;
+ }
+
+ let link_result = if src.is_dir() {
+ #[cfg(target_os = "redox")]
+ use std::os::redox::fs::symlink;
+ #[cfg(unix)]
+ use std::os::unix::fs::symlink;
+ #[cfg(windows)]
+ // FIXME: This should probably panic or have a copy fallback. Symlinks
+ // are not supported in all windows environments. Currently symlinking
+ // is only used for .dSYM directories on macos, but this shouldn't be
+ // accidentally relied upon.
+ use std::os::windows::fs::symlink_dir as symlink;
+
+ let dst_dir = dst.parent().unwrap();
+ let src = if src.starts_with(dst_dir) {
+ src.strip_prefix(dst_dir).unwrap()
+ } else {
+ src
+ };
+ symlink(src, dst)
+ } else if env::var_os("__CARGO_COPY_DONT_LINK_DO_NOT_USE_THIS").is_some() {
+ // This is a work-around for a bug in macOS 10.15. When running on
+ // APFS, there seems to be a strange race condition with
+ // Gatekeeper where it will forcefully kill a process launched via
+ // `cargo run` with SIGKILL. Copying seems to avoid the problem.
+ // This shouldn't affect anyone except Cargo's test suite because
+ // it is very rare, and only seems to happen under heavy load and
+ // rapidly creating lots of executables and running them.
+ // See https://github.com/rust-lang/cargo/issues/7821 for the
+ // gory details.
+ fs::copy(src, dst).map(|_| ())
+ } else {
+ if cfg!(target_os = "macos") {
+ // This is a work-around for a bug on macos. There seems to be a race condition
+ // with APFS when hard-linking binaries. Gatekeeper does not have signing or
+ // hash information stored in kernel when running the process. Therefore killing it.
+ // This problem does not appear when copying files as kernel has time to process it.
+ // Note that: fs::copy on macos is using CopyOnWrite (syscall fclonefileat) which should be
+ // as fast as hardlinking.
+ // See https://github.com/rust-lang/cargo/issues/10060 for the details
+ fs::copy(src, dst).map(|_| ())
+ } else {
+ fs::hard_link(src, dst)
+ }
+ };
+ link_result
+ .or_else(|err| {
+ log::debug!("link failed {}. falling back to fs::copy", err);
+ fs::copy(src, dst).map(|_| ())
+ })
+ .with_context(|| {
+ format!(
+ "failed to link or copy `{}` to `{}`",
+ src.display(),
+ dst.display()
+ )
+ })?;
+ Ok(())
+}
+
+/// Copies a file from one location to another.
+///
+/// Equivalent to [`std::fs::copy`] with better error messages.
+pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<u64> {
+ let from = from.as_ref();
+ let to = to.as_ref();
+ fs::copy(from, to)
+ .with_context(|| format!("failed to copy `{}` to `{}`", from.display(), to.display()))
+}
+
+/// Changes the filesystem mtime (and atime if possible) for the given file.
+///
+/// This intentionally does not return an error, as this is sometimes not
+/// supported on network filesystems. For the current uses in Cargo, this is a
+/// "best effort" approach, and errors shouldn't be propagated.
+pub fn set_file_time_no_err<P: AsRef<Path>>(path: P, time: FileTime) {
+ let path = path.as_ref();
+ match filetime::set_file_times(path, time, time) {
+ Ok(()) => log::debug!("set file mtime {} to {}", path.display(), time),
+ Err(e) => log::warn!(
+ "could not set mtime of {} to {}: {:?}",
+ path.display(),
+ time,
+ e
+ ),
+ }
+}
+
+/// Strips `base` from `path`.
+///
+/// This canonicalizes both paths before stripping. This is useful if the
+/// paths are obtained in different ways, and one or the other may or may not
+/// have been normalized in some way.
+pub fn strip_prefix_canonical<P: AsRef<Path>>(
+ path: P,
+ base: P,
+) -> Result<PathBuf, std::path::StripPrefixError> {
+ // Not all filesystems support canonicalize. Just ignore if it doesn't work.
+ let safe_canonicalize = |path: &Path| match path.canonicalize() {
+ Ok(p) => p,
+ Err(e) => {
+ log::warn!("cannot canonicalize {:?}: {:?}", path, e);
+ path.to_path_buf()
+ }
+ };
+ let canon_path = safe_canonicalize(path.as_ref());
+ let canon_base = safe_canonicalize(base.as_ref());
+ canon_path.strip_prefix(canon_base).map(|p| p.to_path_buf())
+}
+
+/// Creates an excluded from cache directory atomically with its parents as needed.
+///
+/// The atomicity only covers creating the leaf directory and exclusion from cache. Any missing
+/// parent directories will not be created in an atomic manner.
+///
+/// This function is idempotent and in addition to that it won't exclude ``p`` from cache if it
+/// already exists.
+pub fn create_dir_all_excluded_from_backups_atomic(p: impl AsRef<Path>) -> Result<()> {
+ let path = p.as_ref();
+ if path.is_dir() {
+ return Ok(());
+ }
+
+ let parent = path.parent().unwrap();
+ let base = path.file_name().unwrap();
+ create_dir_all(parent)?;
+ // We do this in two steps (first create a temporary directory and exclude
+ // it from backups, then rename it to the desired name. If we created the
+ // directory directly where it should be and then excluded it from backups
+ // we would risk a situation where cargo is interrupted right after the directory
+ // creation but before the exclusion the directory would remain non-excluded from
+ // backups because we only perform exclusion right after we created the directory
+ // ourselves.
+ //
+ // We need the tempdir created in parent instead of $TMP, because only then we can be
+ // easily sure that rename() will succeed (the new name needs to be on the same mount
+ // point as the old one).
+ let tempdir = TempFileBuilder::new().prefix(base).tempdir_in(parent)?;
+ exclude_from_backups(tempdir.path());
+ exclude_from_content_indexing(tempdir.path());
+ // Previously std::fs::create_dir_all() (through paths::create_dir_all()) was used
+ // here to create the directory directly and fs::create_dir_all() explicitly treats
+ // the directory being created concurrently by another thread or process as success,
+ // hence the check below to follow the existing behavior. If we get an error at
+ // rename() and suddenly the directory (which didn't exist a moment earlier) exists
+// we can infer it's another cargo process doing work.
+ if let Err(e) = fs::rename(tempdir.path(), path) {
+ if !path.exists() {
+ return Err(anyhow::Error::from(e));
+ }
+ }
+ Ok(())
+}
+
+/// Mark an existing directory as excluded from backups and indexing.
+///
+/// Errors in marking it are ignored.
+pub fn exclude_from_backups_and_indexing(p: impl AsRef<Path>) {
+ let path = p.as_ref();
+ exclude_from_backups(path);
+ exclude_from_content_indexing(path);
+}
+
+/// Marks the directory as excluded from archives/backups.
+///
+/// This is recommended to prevent derived/temporary files from bloating backups. There are two
+/// mechanisms used to achieve this right now:
+///
+/// * A dedicated resource property excluding from Time Machine backups on macOS
+/// * CACHEDIR.TAG files supported by various tools in a platform-independent way
+fn exclude_from_backups(path: &Path) {
+ exclude_from_time_machine(path);
+ let _ = std::fs::write(
+ path.join("CACHEDIR.TAG"),
+ "Signature: 8a477f597d28d172789f06886806bc55
+# This file is a cache directory tag created by cargo.
+# For information about cache directory tags see https://bford.info/cachedir/
+",
+ );
+ // Similarly to exclude_from_time_machine() we ignore errors here as it's an optional feature.
+}
+
+/// Marks the directory as excluded from content indexing.
+///
+/// This is recommended to prevent the content of derived/temporary files from being indexed.
+/// This is very important for Windows users, as the live content indexing significantly slows
+/// cargo's I/O operations.
+///
+/// This is currently a no-op on non-Windows platforms.
+fn exclude_from_content_indexing(path: &Path) {
+ #[cfg(windows)]
+ {
+ use std::iter::once;
+ use std::os::windows::prelude::OsStrExt;
+ use windows_sys::Win32::Storage::FileSystem::{
+ GetFileAttributesW, SetFileAttributesW, FILE_ATTRIBUTE_NOT_CONTENT_INDEXED,
+ };
+
+ let path: Vec<u16> = path.as_os_str().encode_wide().chain(once(0)).collect();
+ unsafe {
+ SetFileAttributesW(
+ path.as_ptr(),
+ GetFileAttributesW(path.as_ptr()) | FILE_ATTRIBUTE_NOT_CONTENT_INDEXED,
+ );
+ }
+ }
+ #[cfg(not(windows))]
+ {
+ let _ = path;
+ }
+}
+
+#[cfg(not(target_os = "macos"))]
+fn exclude_from_time_machine(_: &Path) {}
+
+#[cfg(target_os = "macos")]
+/// Marks files or directories as excluded from Time Machine on macOS
+fn exclude_from_time_machine(path: &Path) {
+ use core_foundation::base::TCFType;
+ use core_foundation::{number, string, url};
+ use std::ptr;
+
+ // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey
+ let is_excluded_key: Result<string::CFString, _> = "NSURLIsExcludedFromBackupKey".parse();
+ let path = url::CFURL::from_path(path, false);
+ if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) {
+ unsafe {
+ url::CFURLSetResourcePropertyForKey(
+ path.as_concrete_TypeRef(),
+ is_excluded_key.as_concrete_TypeRef(),
+ number::kCFBooleanTrue as *const _,
+ ptr::null_mut(),
+ );
+ }
+ }
+ // Errors are ignored, since it's an optional feature and failure
+ // doesn't prevent Cargo from working
+}
+
+#[cfg(test)]
+mod tests {
+ use super::join_paths;
+
+ #[test]
+ fn join_paths_lists_paths_on_error() {
+ let valid_paths = vec!["/testing/one", "/testing/two"];
+ // does not fail on valid input
+ let _joined = join_paths(&valid_paths, "TESTING1").unwrap();
+
+ #[cfg(unix)]
+ {
+ let invalid_paths = vec!["/testing/one", "/testing/t:wo/three"];
+ let err = join_paths(&invalid_paths, "TESTING2").unwrap_err();
+ assert_eq!(
+ err.to_string(),
+ "failed to join paths from `$TESTING2` together\n\n\
+ Check if any of path segments listed below contain an \
+ unterminated quote character or path separator:\
+ \n \"/testing/one\"\
+ \n \"/testing/t:wo/three\"\
+ "
+ );
+ }
+ #[cfg(windows)]
+ {
+ let invalid_paths = vec!["/testing/one", "/testing/t\"wo/three"];
+ let err = join_paths(&invalid_paths, "TESTING2").unwrap_err();
+ assert_eq!(
+ err.to_string(),
+ "failed to join paths from `$TESTING2` together\n\n\
+ Check if any of path segments listed below contain an \
+ unterminated quote character or path separator:\
+ \n \"/testing/one\"\
+ \n \"/testing/t\\\"wo/three\"\
+ "
+ );
+ }
+ }
+}
diff --git a/crates/cargo-util/src/process_builder.rs b/crates/cargo-util/src/process_builder.rs
new file mode 100644
index 0000000..76392f2
--- /dev/null
+++ b/crates/cargo-util/src/process_builder.rs
@@ -0,0 +1,689 @@
+use crate::process_error::ProcessError;
+use crate::read2;
+
+use anyhow::{bail, Context, Result};
+use jobserver::Client;
+use shell_escape::escape;
+use tempfile::NamedTempFile;
+
+use std::collections::BTreeMap;
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fmt;
+use std::io::{self, Write};
+use std::iter::once;
+use std::path::Path;
+use std::process::{Command, ExitStatus, Output, Stdio};
+
/// A builder object for an external process, similar to [`std::process::Command`].
#[derive(Clone, Debug)]
pub struct ProcessBuilder {
    /// The program to execute.
    program: OsString,
    /// A list of arguments to pass to the program.
    args: Vec<OsString>,
    /// Any environment variables that should be set for the program.
    ///
    /// A `None` value means the variable is explicitly unset for the child
    /// (i.e. not inherited from the parent environment).
    env: BTreeMap<String, Option<OsString>>,
    /// The directory to run the program from.
    cwd: Option<OsString>,
    /// A list of wrappers that wrap the original program when calling
    /// [`ProcessBuilder::wrapped`]. The last one is the outermost one.
    wrappers: Vec<OsString>,
    /// The `make` jobserver. See the [jobserver crate] for
    /// more information.
    ///
    /// [jobserver crate]: https://docs.rs/jobserver/
    jobserver: Option<Client>,
    /// `true` to include environment variable in display.
    display_env_vars: bool,
    /// `true` to retry with an argfile if hitting "command line too big" error.
    /// See [`ProcessBuilder::retry_with_argfile`] for more information.
    retry_with_argfile: bool,
    /// Data to write to stdin.
    stdin: Option<Vec<u8>>,
}
+
+impl fmt::Display for ProcessBuilder {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "`")?;
+
+ if self.display_env_vars {
+ for (key, val) in self.env.iter() {
+ if let Some(val) = val {
+ let val = escape(val.to_string_lossy());
+ if cfg!(windows) {
+ write!(f, "set {}={}&& ", key, val)?;
+ } else {
+ write!(f, "{}={} ", key, val)?;
+ }
+ }
+ }
+ }
+
+ write!(f, "{}", self.get_program().to_string_lossy())?;
+
+ for arg in self.get_args() {
+ write!(f, " {}", escape(arg.to_string_lossy()))?;
+ }
+
+ write!(f, "`")
+ }
+}
+
+impl ProcessBuilder {
+ /// Creates a new [`ProcessBuilder`] with the given executable path.
+ pub fn new<T: AsRef<OsStr>>(cmd: T) -> ProcessBuilder {
+ ProcessBuilder {
+ program: cmd.as_ref().to_os_string(),
+ args: Vec::new(),
+ cwd: None,
+ env: BTreeMap::new(),
+ wrappers: Vec::new(),
+ jobserver: None,
+ display_env_vars: false,
+ retry_with_argfile: false,
+ stdin: None,
+ }
+ }
+
+ /// (chainable) Sets the executable for the process.
+ pub fn program<T: AsRef<OsStr>>(&mut self, program: T) -> &mut ProcessBuilder {
+ self.program = program.as_ref().to_os_string();
+ self
+ }
+
+ /// (chainable) Adds `arg` to the args list.
+ pub fn arg<T: AsRef<OsStr>>(&mut self, arg: T) -> &mut ProcessBuilder {
+ self.args.push(arg.as_ref().to_os_string());
+ self
+ }
+
+ /// (chainable) Adds multiple `args` to the args list.
+ pub fn args<T: AsRef<OsStr>>(&mut self, args: &[T]) -> &mut ProcessBuilder {
+ self.args
+ .extend(args.iter().map(|t| t.as_ref().to_os_string()));
+ self
+ }
+
+ /// (chainable) Replaces the args list with the given `args`.
+ pub fn args_replace<T: AsRef<OsStr>>(&mut self, args: &[T]) -> &mut ProcessBuilder {
+ if let Some(program) = self.wrappers.pop() {
+ // User intend to replace all args, so we
+ // - use the outermost wrapper as the main program, and
+ // - cleanup other inner wrappers.
+ self.program = program;
+ self.wrappers = Vec::new();
+ }
+ self.args = args.iter().map(|t| t.as_ref().to_os_string()).collect();
+ self
+ }
+
+ /// (chainable) Sets the current working directory of the process.
+ pub fn cwd<T: AsRef<OsStr>>(&mut self, path: T) -> &mut ProcessBuilder {
+ self.cwd = Some(path.as_ref().to_os_string());
+ self
+ }
+
+ /// (chainable) Sets an environment variable for the process.
+ pub fn env<T: AsRef<OsStr>>(&mut self, key: &str, val: T) -> &mut ProcessBuilder {
+ self.env
+ .insert(key.to_string(), Some(val.as_ref().to_os_string()));
+ self
+ }
+
    /// (chainable) Explicitly unsets an environment variable for the process,
    /// so the child will not inherit it from the parent environment either.
    pub fn env_remove(&mut self, key: &str) -> &mut ProcessBuilder {
        self.env.insert(key.to_string(), None);
        self
    }

    /// Gets the executable name.
    ///
    /// When wrappers are configured, this is the outermost wrapper — the
    /// executable that will actually be spawned.
    pub fn get_program(&self) -> &OsString {
        self.wrappers.last().unwrap_or(&self.program)
    }

    /// Gets the program arguments.
    ///
    /// When wrappers are present, the inner wrappers and the original
    /// program appear as leading arguments.
    pub fn get_args(&self) -> impl Iterator<Item = &OsString> {
        self.wrappers
            .iter()
            .rev()
            .chain(once(&self.program))
            .chain(self.args.iter())
            .skip(1) // Skip the effective main program (outermost wrapper, or `program`)
    }
+
+ /// Gets the current working directory for the process.
+ pub fn get_cwd(&self) -> Option<&Path> {
+ self.cwd.as_ref().map(Path::new)
+ }
+
+ /// Gets an environment variable as the process will see it (will inherit from environment
+ /// unless explicitally unset).
+ pub fn get_env(&self, var: &str) -> Option<OsString> {
+ self.env
+ .get(var)
+ .cloned()
+ .or_else(|| Some(env::var_os(var)))
+ .and_then(|s| s)
+ }
+
    /// Gets all environment variables explicitly set or unset for the process (not inherited
    /// vars).
    ///
    /// A `None` value means the variable was explicitly unset via
    /// [`ProcessBuilder::env_remove`].
    pub fn get_envs(&self) -> &BTreeMap<String, Option<OsString>> {
        &self.env
    }

    /// Sets the `make` jobserver. See the [jobserver crate][jobserver_docs] for
    /// more information.
    ///
    /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/
    pub fn inherit_jobserver(&mut self, jobserver: &Client) -> &mut Self {
        self.jobserver = Some(jobserver.clone());
        self
    }
+
    /// Enables environment variable display.
    pub fn display_env_vars(&mut self) -> &mut Self {
        self.display_env_vars = true;
        self
    }

    /// Enables retrying with an argfile if hitting "command line too big" error
    ///
    /// This is primarily for the `@path` arg of rustc and rustdoc, which treat
    /// each line as a command-line argument, so `LF` and `CRLF` bytes are not
    /// valid as an argument for argfile at this moment.
    /// For example, `RUSTDOCFLAGS="--crate-version foo\nbar" cargo doc` is
    /// valid when invoking from command-line but not from argfile.
    ///
    /// To sum up, the limitations of the argfile are:
    ///
    /// - Must be valid UTF-8 encoded.
    /// - Must not contain any newlines in each argument.
    ///
    /// Ref:
    ///
    /// - <https://doc.rust-lang.org/rustdoc/command-line-arguments.html#path-load-command-line-flags-from-a-path>
    /// - <https://doc.rust-lang.org/rustc/command-line-arguments.html#path-load-command-line-flags-from-a-path>
    pub fn retry_with_argfile(&mut self, enabled: bool) -> &mut Self {
        self.retry_with_argfile = enabled;
        self
    }

    /// Sets a value that will be written to stdin of the process on launch.
    pub fn stdin<T: Into<Vec<u8>>>(&mut self, stdin: T) -> &mut Self {
        self.stdin = Some(stdin.into());
        self
    }

    /// Whether a failed spawn should be retried via an argfile: only when
    /// retrying is enabled *and* the OS error is "command line too big".
    fn should_retry_with_argfile(&self, err: &io::Error) -> bool {
        self.retry_with_argfile && imp::command_line_too_big(err)
    }

    /// Like [`Command::status`] but with a better error message.
    pub fn status(&self) -> Result<ExitStatus> {
        self._status()
            .with_context(|| ProcessError::could_not_execute(self))
    }

    /// Spawns and waits for the process, retrying once with an argfile if
    /// the command line was too big (or going straight to the argfile when
    /// forced for tests).
    fn _status(&self) -> io::Result<ExitStatus> {
        if !debug_force_argfile(self.retry_with_argfile) {
            let mut cmd = self.build_command();
            match cmd.spawn() {
                // Too-big command line: fall through to the argfile path below.
                Err(ref e) if self.should_retry_with_argfile(e) => {}
                Err(e) => return Err(e),
                Ok(mut child) => return child.wait(),
            }
        }
        let (mut cmd, argfile) = self.build_command_with_argfile()?;
        let status = cmd.spawn()?.wait();
        close_tempfile_and_log_error(argfile);
        status
    }
+
+ /// Runs the process, waiting for completion, and mapping non-success exit codes to an error.
+ pub fn exec(&self) -> Result<()> {
+ let exit = self.status()?;
+ if exit.success() {
+ Ok(())
+ } else {
+ Err(ProcessError::new(
+ &format!("process didn't exit successfully: {}", self),
+ Some(exit),
+ None,
+ )
+ .into())
+ }
+ }
+
    /// Replaces the current process with the target process.
    ///
    /// On Unix, this executes the process using the Unix syscall `execvp`, which will block
    /// this process, and will only return if there is an error.
    ///
    /// On Windows this isn't technically possible. Instead we emulate it to the best of our
    /// ability. One aspect we fix here is that we specify a handler for the Ctrl-C handler.
    /// In doing so (and by effectively ignoring it) we should emulate proxying Ctrl-C
    /// handling to the application at hand, which will either terminate or handle it itself.
    /// According to Microsoft's documentation at
    /// <https://docs.microsoft.com/en-us/windows/console/ctrl-c-and-ctrl-break-signals>.
    /// the Ctrl-C signal is sent to all processes attached to a terminal, which should
    /// include our child process. If the child terminates then we'll reap them in Cargo
    /// pretty quickly, and if the child handles the signal then we won't terminate
    /// (and we shouldn't!) until the process itself later exits.
    pub fn exec_replace(&self) -> Result<()> {
        imp::exec_replace(self)
    }

    /// Like [`Command::output`] but with a better error message.
    pub fn output(&self) -> Result<Output> {
        self._output()
            .with_context(|| ProcessError::could_not_execute(self))
    }

    /// Captures stdout/stderr (feeding `self.stdin` to the child when
    /// configured), retrying once with an argfile if the command line was
    /// too big (or going straight to the argfile when forced for tests).
    fn _output(&self) -> io::Result<Output> {
        if !debug_force_argfile(self.retry_with_argfile) {
            let mut cmd = self.build_command();
            match piped(&mut cmd, self.stdin.is_some()).spawn() {
                // Too-big command line: fall through to the argfile path below.
                Err(ref e) if self.should_retry_with_argfile(e) => {}
                Err(e) => return Err(e),
                Ok(mut child) => {
                    if let Some(stdin) = &self.stdin {
                        child.stdin.take().unwrap().write_all(stdin)?;
                    }
                    return child.wait_with_output();
                }
            }
        }
        let (mut cmd, argfile) = self.build_command_with_argfile()?;
        let mut child = piped(&mut cmd, self.stdin.is_some()).spawn()?;
        if let Some(stdin) = &self.stdin {
            child.stdin.take().unwrap().write_all(stdin)?;
        }
        let output = child.wait_with_output();
        close_tempfile_and_log_error(argfile);
        output
    }
+
+ /// Executes the process, returning the stdio output, or an error if non-zero exit status.
+ pub fn exec_with_output(&self) -> Result<Output> {
+ let output = self.output()?;
+ if output.status.success() {
+ Ok(output)
+ } else {
+ Err(ProcessError::new(
+ &format!("process didn't exit successfully: {}", self),
+ Some(output.status),
+ Some(&output),
+ )
+ .into())
+ }
+ }
+
    /// Executes a command, passing each line of stdout and stderr to the supplied callbacks, which
    /// can mutate the string data.
    ///
    /// If any invocations of these function return an error, it will be propagated.
    ///
    /// If `capture_output` is true, then all the output will also be buffered
    /// and stored in the returned `Output` object. If it is false, no caching
    /// is done, and the callbacks are solely responsible for handling the
    /// output.
    pub fn exec_with_streaming(
        &self,
        on_stdout_line: &mut dyn FnMut(&str) -> Result<()>,
        on_stderr_line: &mut dyn FnMut(&str) -> Result<()>,
        capture_output: bool,
    ) -> Result<Output> {
        let mut stdout = Vec::new();
        let mut stderr = Vec::new();

        // First error returned by a callback; once set, remaining output is
        // still drained but no longer passed to the callbacks.
        let mut callback_error = None;
        // How far into each stream buffer we have already scanned for a
        // newline, so partial lines are not re-scanned on the next read.
        let mut stdout_pos = 0;
        let mut stderr_pos = 0;

        let spawn = |mut cmd| {
            if !debug_force_argfile(self.retry_with_argfile) {
                match piped(&mut cmd, false).spawn() {
                    // Too-big command line: fall through to the argfile path.
                    Err(ref e) if self.should_retry_with_argfile(e) => {}
                    Err(e) => return Err(e),
                    Ok(child) => return Ok((child, None)),
                }
            }
            let (mut cmd, argfile) = self.build_command_with_argfile()?;
            Ok((piped(&mut cmd, false).spawn()?, Some(argfile)))
        };

        let status = (|| {
            let cmd = self.build_command();
            let (mut child, argfile) = spawn(cmd)?;
            let out = child.stdout.take().unwrap();
            let err = child.stderr.take().unwrap();
            read2(out, err, &mut |is_out, data, eof| {
                let pos = if is_out {
                    &mut stdout_pos
                } else {
                    &mut stderr_pos
                };
                // Only complete lines are handed to the callbacks (everything
                // at EOF); bytes after the last newline stay buffered.
                let idx = if eof {
                    data.len()
                } else {
                    match data[*pos..].iter().rposition(|b| *b == b'\n') {
                        Some(i) => *pos + i + 1,
                        None => {
                            *pos = data.len();
                            return;
                        }
                    }
                };

                let new_lines = &data[..idx];

                for line in String::from_utf8_lossy(new_lines).lines() {
                    if callback_error.is_some() {
                        break;
                    }
                    let callback_result = if is_out {
                        on_stdout_line(line)
                    } else {
                        on_stderr_line(line)
                    };
                    if let Err(e) = callback_result {
                        callback_error = Some(e);
                        break;
                    }
                }

                if capture_output {
                    let dst = if is_out { &mut stdout } else { &mut stderr };
                    dst.extend(new_lines);
                }

                // Drop the consumed lines and restart scanning at the front.
                data.drain(..idx);
                *pos = 0;
            })?;
            let status = child.wait();
            if let Some(argfile) = argfile {
                close_tempfile_and_log_error(argfile);
            }
            status
        })()
        .with_context(|| ProcessError::could_not_execute(self))?;
        let output = Output {
            status,
            stdout,
            stderr,
        };

        {
            let to_print = if capture_output { Some(&output) } else { None };
            if let Some(e) = callback_error {
                let cx = ProcessError::new(
                    &format!("failed to parse process output: {}", self),
                    Some(output.status),
                    to_print,
                );
                bail!(anyhow::Error::new(cx).context(e));
            } else if !output.status.success() {
                bail!(ProcessError::new(
                    &format!("process didn't exit successfully: {}", self),
                    Some(output.status),
                    to_print,
                ));
            }
        }

        Ok(output)
    }
+
+ /// Builds the command with an `@<path>` argfile that contains all the
+ /// arguments. This is primarily served for rustc/rustdoc command family.
+ fn build_command_with_argfile(&self) -> io::Result<(Command, NamedTempFile)> {
+ use std::io::Write as _;
+
+ let mut tmp = tempfile::Builder::new()
+ .prefix("cargo-argfile.")
+ .tempfile()?;
+
+ let mut arg = OsString::from("@");
+ arg.push(tmp.path());
+ let mut cmd = self.build_command_without_args();
+ cmd.arg(arg);
+ log::debug!("created argfile at {} for {self}", tmp.path().display());
+
+ let cap = self.get_args().map(|arg| arg.len() + 1).sum::<usize>();
+ let mut buf = Vec::with_capacity(cap);
+ for arg in &self.args {
+ let arg = arg.to_str().ok_or_else(|| {
+ io::Error::new(
+ io::ErrorKind::Other,
+ format!(
+ "argument for argfile contains invalid UTF-8 characters: `{}`",
+ arg.to_string_lossy()
+ ),
+ )
+ })?;
+ if arg.contains('\n') {
+ return Err(io::Error::new(
+ io::ErrorKind::Other,
+ format!("argument for argfile contains newlines: `{arg}`"),
+ ));
+ }
+ writeln!(buf, "{arg}")?;
+ }
+ tmp.write_all(&mut buf)?;
+ Ok((cmd, tmp))
+ }
+
+ /// Builds a command from `ProcessBuilder` for everything but not `args`.
+ fn build_command_without_args(&self) -> Command {
+ let mut command = {
+ let mut iter = self.wrappers.iter().rev().chain(once(&self.program));
+ let mut cmd = Command::new(iter.next().expect("at least one `program` exists"));
+ cmd.args(iter);
+ cmd
+ };
+ if let Some(cwd) = self.get_cwd() {
+ command.current_dir(cwd);
+ }
+ for (k, v) in &self.env {
+ match *v {
+ Some(ref v) => {
+ command.env(k, v);
+ }
+ None => {
+ command.env_remove(k);
+ }
+ }
+ }
+ if let Some(ref c) = self.jobserver {
+ c.configure(&mut command);
+ }
+ command
+ }
+
+ /// Converts `ProcessBuilder` into a `std::process::Command`, and handles
+ /// the jobserver, if present.
+ ///
+ /// Note that this method doesn't take argfile fallback into account. The
+ /// caller should handle it by themselves.
+ pub fn build_command(&self) -> Command {
+ let mut command = self.build_command_without_args();
+ for arg in &self.args {
+ command.arg(arg);
+ }
+ command
+ }
+
+ /// Wraps an existing command with the provided wrapper, if it is present and valid.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// use cargo_util::ProcessBuilder;
+ /// // Running this would execute `rustc`
+ /// let cmd = ProcessBuilder::new("rustc");
+ ///
+ /// // Running this will execute `sccache rustc`
+ /// let cmd = cmd.wrapped(Some("sccache"));
+ /// ```
+ pub fn wrapped(mut self, wrapper: Option<impl AsRef<OsStr>>) -> Self {
+ if let Some(wrapper) = wrapper.as_ref() {
+ let wrapper = wrapper.as_ref();
+ if !wrapper.is_empty() {
+ self.wrappers.push(wrapper.to_os_string());
+ }
+ }
+ self
+ }
+}
+
/// Forces the command to use `@path` argfile.
///
/// Only honored in debug builds and only when the
/// `__CARGO_TEST_FORCE_ARGFILE` environment variable is set; `retry_enabled`
/// must also be true so commands that never opt into argfiles are unaffected.
fn debug_force_argfile(retry_enabled: bool) -> bool {
    retry_enabled && cfg!(debug_assertions) && env::var("__CARGO_TEST_FORCE_ARGFILE").is_ok()
}
+
/// Configures `cmd` to capture stdout and stderr through pipes, and to
/// either pipe stdin (when data will be written to it) or attach it to null.
fn piped(cmd: &mut Command, pipe_stdin: bool) -> &mut Command {
    let stdin = if pipe_stdin {
        Stdio::piped()
    } else {
        Stdio::null()
    };
    cmd.stdin(stdin)
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
}
+
+fn close_tempfile_and_log_error(file: NamedTempFile) {
+ file.close().unwrap_or_else(|e| {
+ log::warn!("failed to close temporary file: {e}");
+ });
+}
+
#[cfg(unix)]
mod imp {
    use super::{close_tempfile_and_log_error, debug_force_argfile, ProcessBuilder, ProcessError};
    use anyhow::Result;
    use std::io;
    use std::os::unix::process::CommandExt;

    /// Replaces the current process image via `execvp`.
    ///
    /// `Command::exec` only returns on failure, so reaching the end of this
    /// function always means an error; the argfile (if one was created) is
    /// cleaned up before the error is reported.
    pub fn exec_replace(process_builder: &ProcessBuilder) -> Result<()> {
        let mut error;
        let mut file = None;
        if debug_force_argfile(process_builder.retry_with_argfile) {
            let (mut command, argfile) = process_builder.build_command_with_argfile()?;
            file = Some(argfile);
            error = command.exec()
        } else {
            let mut command = process_builder.build_command();
            error = command.exec();
            // `exec` failed; if the command line was too big, retry once
            // with an argfile.
            if process_builder.should_retry_with_argfile(&error) {
                let (mut command, argfile) = process_builder.build_command_with_argfile()?;
                file = Some(argfile);
                error = command.exec()
            }
        }
        if let Some(file) = file {
            close_tempfile_and_log_error(file);
        }

        Err(anyhow::Error::from(error).context(ProcessError::new(
            &format!("could not execute process {}", process_builder),
            None,
            None,
        )))
    }

    /// `true` if the spawn error is `E2BIG` ("argument list too long").
    pub fn command_line_too_big(err: &io::Error) -> bool {
        err.raw_os_error() == Some(libc::E2BIG)
    }
}
+
#[cfg(windows)]
mod imp {
    use super::{ProcessBuilder, ProcessError};
    use anyhow::Result;
    use std::io;
    use windows_sys::Win32::Foundation::{BOOL, FALSE, TRUE};
    use windows_sys::Win32::System::Console::SetConsoleCtrlHandler;

    /// Ctrl-C handler that deliberately does nothing, so the signal is
    /// effectively proxied to the child process (see `exec_replace` docs on
    /// `ProcessBuilder`).
    unsafe extern "system" fn ctrlc_handler(_: u32) -> BOOL {
        // Do nothing; let the child process handle it.
        TRUE
    }

    /// Emulates Unix `exec_replace`: installs the no-op Ctrl-C handler and
    /// then runs the child as a regular subprocess.
    pub fn exec_replace(process_builder: &ProcessBuilder) -> Result<()> {
        unsafe {
            if SetConsoleCtrlHandler(Some(ctrlc_handler), TRUE) == FALSE {
                return Err(ProcessError::new("Could not set Ctrl-C handler.", None, None).into());
            }
        }

        // Just execute the process as normal.
        process_builder.exec()
    }

    /// `true` if the spawn error indicates the command line exceeded the
    /// OS limit (`ERROR_FILENAME_EXCED_RANGE`).
    pub fn command_line_too_big(err: &io::Error) -> bool {
        use windows_sys::Win32::Foundation::ERROR_FILENAME_EXCED_RANGE;
        err.raw_os_error() == Some(ERROR_FILENAME_EXCED_RANGE as i32)
    }
}
+
#[cfg(test)]
mod tests {
    use super::ProcessBuilder;
    use std::fs;

    #[test]
    fn argfile_build_succeeds() {
        let mut cmd = ProcessBuilder::new("echo");
        cmd.args(["foo", "bar"].as_slice());
        let (cmd, argfile) = cmd.build_command_with_argfile().unwrap();

        // The built command carries a single `@<path>` argument pointing at
        // the temp file; the real arguments live inside that file.
        assert_eq!(cmd.get_program(), "echo");
        let cmd_args: Vec<_> = cmd.get_args().map(|s| s.to_str().unwrap()).collect();
        assert_eq!(cmd_args.len(), 1);
        assert!(cmd_args[0].starts_with("@"));
        assert!(cmd_args[0].contains("cargo-argfile."));

        // One argument per line, each newline-terminated.
        let buf = fs::read_to_string(argfile.path()).unwrap();
        assert_eq!(buf, "foo\nbar\n");
    }

    #[test]
    fn argfile_build_fails_if_arg_contains_newline() {
        let mut cmd = ProcessBuilder::new("echo");
        cmd.arg("foo\n");
        let err = cmd.build_command_with_argfile().unwrap_err();
        assert_eq!(
            err.to_string(),
            "argument for argfile contains newlines: `foo\n`"
        );
    }

    #[test]
    fn argfile_build_fails_if_arg_contains_invalid_utf8() {
        let mut cmd = ProcessBuilder::new("echo");

        // Build a platform-appropriate non-UTF-8 `OsString`: an unpaired
        // surrogate on Windows, an invalid byte on Unix.
        #[cfg(windows)]
        let invalid_arg = {
            use std::os::windows::prelude::*;
            std::ffi::OsString::from_wide(&[0x0066, 0x006f, 0xD800, 0x006f])
        };

        #[cfg(unix)]
        let invalid_arg = {
            use std::os::unix::ffi::OsStrExt;
            std::ffi::OsStr::from_bytes(&[0x66, 0x6f, 0x80, 0x6f]).to_os_string()
        };

        cmd.arg(invalid_arg);
        let err = cmd.build_command_with_argfile().unwrap_err();
        assert_eq!(
            err.to_string(),
            "argument for argfile contains invalid UTF-8 characters: `fo�o`"
        );
    }
}
diff --git a/crates/cargo-util/src/process_error.rs b/crates/cargo-util/src/process_error.rs
new file mode 100644
index 0000000..9b4a38c
--- /dev/null
+++ b/crates/cargo-util/src/process_error.rs
@@ -0,0 +1,200 @@
+//! Error value for [`crate::ProcessBuilder`] when a process fails.
+
+use std::fmt;
+use std::process::{ExitStatus, Output};
+use std::str;
+
#[derive(Debug)]
pub struct ProcessError {
    /// A detailed description to show to the user why the process failed.
    ///
    /// Built by [`ProcessError::new_raw`]; includes the exit status and any
    /// captured, valid-UTF-8 stdout/stderr.
    pub desc: String,

    /// The exit status of the process.
    ///
    /// This can be `None` if the process failed to launch (like process not
    /// found) or if the exit status wasn't a code but was instead something
    /// like termination via a signal.
    pub code: Option<i32>,

    /// The stdout from the process.
    ///
    /// This can be `None` if the process failed to launch, or the output was
    /// not captured.
    pub stdout: Option<Vec<u8>>,

    /// The stderr from the process.
    ///
    /// This can be `None` if the process failed to launch, or the output was
    /// not captured.
    pub stderr: Option<Vec<u8>>,
}
+
impl fmt::Display for ProcessError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `desc` is built to be the full user-facing message (it already
        // embeds the exit status and any captured output).
        self.desc.fmt(f)
    }
}

// Marker impl: `Display`/`Debug` above satisfy the `Error` supertraits.
impl std::error::Error for ProcessError {}
+
+impl ProcessError {
+ /// Creates a new [`ProcessError`].
+ ///
+ /// * `status` can be `None` if the process did not launch.
+ /// * `output` can be `None` if the process did not launch, or output was not captured.
+ pub fn new(msg: &str, status: Option<ExitStatus>, output: Option<&Output>) -> ProcessError {
+ let exit = match status {
+ Some(s) => exit_status_to_string(s),
+ None => "never executed".to_string(),
+ };
+
+ Self::new_raw(
+ msg,
+ status.and_then(|s| s.code()),
+ &exit,
+ output.map(|s| s.stdout.as_slice()),
+ output.map(|s| s.stderr.as_slice()),
+ )
+ }
+
+ /// Creates a new [`ProcessError`] with the raw output data.
+ ///
+ /// * `code` can be `None` for situations like being killed by a signal on unix.
+ pub fn new_raw(
+ msg: &str,
+ code: Option<i32>,
+ status: &str,
+ stdout: Option<&[u8]>,
+ stderr: Option<&[u8]>,
+ ) -> ProcessError {
+ let mut desc = format!("{} ({})", msg, status);
+
+ if let Some(out) = stdout {
+ match str::from_utf8(out) {
+ Ok(s) if !s.trim().is_empty() => {
+ desc.push_str("\n--- stdout\n");
+ desc.push_str(s);
+ }
+ Ok(..) | Err(..) => {}
+ }
+ }
+ if let Some(out) = stderr {
+ match str::from_utf8(out) {
+ Ok(s) if !s.trim().is_empty() => {
+ desc.push_str("\n--- stderr\n");
+ desc.push_str(s);
+ }
+ Ok(..) | Err(..) => {}
+ }
+ }
+
+ ProcessError {
+ desc,
+ code,
+ stdout: stdout.map(|s| s.to_vec()),
+ stderr: stderr.map(|s| s.to_vec()),
+ }
+ }
+
+ /// Creates a [`ProcessError`] with "could not execute process {cmd}".
+ ///
+ /// * `cmd` is usually but not limited to [`std::process::Command`].
+ pub fn could_not_execute(cmd: impl fmt::Display) -> ProcessError {
+ ProcessError::new(&format!("could not execute process {cmd}"), None, None)
+ }
+}
+
/// Converts an [`ExitStatus`] to a human-readable string suitable for
/// displaying to a user.
pub fn exit_status_to_string(status: ExitStatus) -> String {
    // Delegate to the platform-specific helper below; the `return` up front
    // lets both cfg-gated definitions share one public signature.
    return status_to_string(status);

    /// Unix: annotate well-known termination signals with their names.
    #[cfg(unix)]
    fn status_to_string(status: ExitStatus) -> String {
        use std::os::unix::process::*;

        if let Some(signal) = status.signal() {
            let name = match signal as libc::c_int {
                libc::SIGABRT => ", SIGABRT: process abort signal",
                libc::SIGALRM => ", SIGALRM: alarm clock",
                libc::SIGFPE => ", SIGFPE: erroneous arithmetic operation",
                libc::SIGHUP => ", SIGHUP: hangup",
                libc::SIGILL => ", SIGILL: illegal instruction",
                libc::SIGINT => ", SIGINT: terminal interrupt signal",
                libc::SIGKILL => ", SIGKILL: kill",
                libc::SIGPIPE => ", SIGPIPE: write on a pipe with no one to read",
                libc::SIGQUIT => ", SIGQUIT: terminal quit signal",
                libc::SIGSEGV => ", SIGSEGV: invalid memory reference",
                libc::SIGTERM => ", SIGTERM: termination signal",
                libc::SIGBUS => ", SIGBUS: access to undefined memory",
                #[cfg(not(target_os = "haiku"))]
                libc::SIGSYS => ", SIGSYS: bad system call",
                libc::SIGTRAP => ", SIGTRAP: trace/breakpoint trap",
                _ => "",
            };
            format!("signal: {}{}", signal, name)
        } else {
            status.to_string()
        }
    }

    /// Windows: annotate well-known NTSTATUS exception codes with their names.
    #[cfg(windows)]
    fn status_to_string(status: ExitStatus) -> String {
        use windows_sys::Win32::Foundation::*;

        let mut base = status.to_string();
        let extra = match status.code().unwrap() as i32 {
            STATUS_ACCESS_VIOLATION => "STATUS_ACCESS_VIOLATION",
            STATUS_IN_PAGE_ERROR => "STATUS_IN_PAGE_ERROR",
            STATUS_INVALID_HANDLE => "STATUS_INVALID_HANDLE",
            STATUS_INVALID_PARAMETER => "STATUS_INVALID_PARAMETER",
            STATUS_NO_MEMORY => "STATUS_NO_MEMORY",
            STATUS_ILLEGAL_INSTRUCTION => "STATUS_ILLEGAL_INSTRUCTION",
            STATUS_NONCONTINUABLE_EXCEPTION => "STATUS_NONCONTINUABLE_EXCEPTION",
            STATUS_INVALID_DISPOSITION => "STATUS_INVALID_DISPOSITION",
            STATUS_ARRAY_BOUNDS_EXCEEDED => "STATUS_ARRAY_BOUNDS_EXCEEDED",
            STATUS_FLOAT_DENORMAL_OPERAND => "STATUS_FLOAT_DENORMAL_OPERAND",
            STATUS_FLOAT_DIVIDE_BY_ZERO => "STATUS_FLOAT_DIVIDE_BY_ZERO",
            STATUS_FLOAT_INEXACT_RESULT => "STATUS_FLOAT_INEXACT_RESULT",
            STATUS_FLOAT_INVALID_OPERATION => "STATUS_FLOAT_INVALID_OPERATION",
            STATUS_FLOAT_OVERFLOW => "STATUS_FLOAT_OVERFLOW",
            STATUS_FLOAT_STACK_CHECK => "STATUS_FLOAT_STACK_CHECK",
            STATUS_FLOAT_UNDERFLOW => "STATUS_FLOAT_UNDERFLOW",
            STATUS_INTEGER_DIVIDE_BY_ZERO => "STATUS_INTEGER_DIVIDE_BY_ZERO",
            STATUS_INTEGER_OVERFLOW => "STATUS_INTEGER_OVERFLOW",
            STATUS_PRIVILEGED_INSTRUCTION => "STATUS_PRIVILEGED_INSTRUCTION",
            STATUS_STACK_OVERFLOW => "STATUS_STACK_OVERFLOW",
            STATUS_DLL_NOT_FOUND => "STATUS_DLL_NOT_FOUND",
            STATUS_ORDINAL_NOT_FOUND => "STATUS_ORDINAL_NOT_FOUND",
            STATUS_ENTRYPOINT_NOT_FOUND => "STATUS_ENTRYPOINT_NOT_FOUND",
            STATUS_CONTROL_C_EXIT => "STATUS_CONTROL_C_EXIT",
            STATUS_DLL_INIT_FAILED => "STATUS_DLL_INIT_FAILED",
            STATUS_FLOAT_MULTIPLE_FAULTS => "STATUS_FLOAT_MULTIPLE_FAULTS",
            STATUS_FLOAT_MULTIPLE_TRAPS => "STATUS_FLOAT_MULTIPLE_TRAPS",
            STATUS_REG_NAT_CONSUMPTION => "STATUS_REG_NAT_CONSUMPTION",
            STATUS_HEAP_CORRUPTION => "STATUS_HEAP_CORRUPTION",
            STATUS_STACK_BUFFER_OVERRUN => "STATUS_STACK_BUFFER_OVERRUN",
            STATUS_ASSERTION_FAILURE => "STATUS_ASSERTION_FAILURE",
            _ => return base,
        };
        base.push_str(", ");
        base.push_str(extra);
        base
    }
}
+
/// Returns `true` if the given process exit code is something a normal
/// process would exit with.
///
/// This helps differentiate from abnormal termination codes, such as
/// segmentation faults or signals.
pub fn is_simple_exit_code(code: i32) -> bool {
    // Typical unix exit codes are 0 to 127. Windows exit codes are a full
    // 32-bit number (which appears signed here, but is really unsigned),
    // and most of the interesting NTSTATUS codes are very large, so
    // treating 0..=127 as "normal" is a rough but useful approximation on
    // both platforms.
    (0..=127).contains(&code)
}
diff --git a/crates/cargo-util/src/read2.rs b/crates/cargo-util/src/read2.rs
new file mode 100644
index 0000000..742dc1d
--- /dev/null
+++ b/crates/cargo-util/src/read2.rs
@@ -0,0 +1,178 @@
+pub use self::imp::read2;
+
#[cfg(unix)]
mod imp {
    use std::io;
    use std::io::prelude::*;
    use std::mem;
    use std::os::unix::prelude::*;
    use std::process::{ChildStderr, ChildStdout};

    /// Reads the child's stdout and stderr concurrently with `poll`,
    /// invoking `data(is_stdout, buffer, eof)` as bytes arrive on either
    /// stream, until both hit EOF.
    pub fn read2(
        mut out_pipe: ChildStdout,
        mut err_pipe: ChildStderr,
        data: &mut dyn FnMut(bool, &mut Vec<u8>, bool),
    ) -> io::Result<()> {
        // Switch both pipes to non-blocking so a readable pipe can be fully
        // drained without stalling on the other stream.
        unsafe {
            libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
            libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
        }

        let mut out_done = false;
        let mut err_done = false;
        let mut out = Vec::new();
        let mut err = Vec::new();

        let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() };
        fds[0].fd = out_pipe.as_raw_fd();
        fds[0].events = libc::POLLIN;
        fds[1].fd = err_pipe.as_raw_fd();
        fds[1].events = libc::POLLIN;
        let mut nfds = 2;
        // Index of the stderr entry within `fds`; it moves to slot 0 once
        // stdout finishes and slot 0 is reused for stderr (see below).
        let mut errfd = 1;

        while nfds > 0 {
            // wait for either pipe to become readable using `poll`
            let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) };
            if r == -1 {
                let err = io::Error::last_os_error();
                if err.kind() == io::ErrorKind::Interrupted {
                    continue;
                }
                return Err(err);
            }

            // Read as much as we can from each pipe, ignoring EWOULDBLOCK or
            // EAGAIN. If we hit EOF, then this will happen because the underlying
            // reader will return Ok(0), in which case we'll see `Ok` ourselves. In
            // this case the finished stream stops being polled (and once stdout
            // is done, its `fds` slot is reused to keep polling stderr).
            let handle = |res: io::Result<_>| match res {
                Ok(_) => Ok(true),
                Err(e) => {
                    if e.kind() == io::ErrorKind::WouldBlock {
                        Ok(false)
                    } else {
                        Err(e)
                    }
                }
            };
            if !err_done && fds[errfd].revents != 0 && handle(err_pipe.read_to_end(&mut err))? {
                err_done = true;
                nfds -= 1;
            }
            data(false, &mut err, err_done);
            if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? {
                out_done = true;
                fds[0].fd = err_pipe.as_raw_fd();
                errfd = 0;
                nfds -= 1;
            }
            data(true, &mut out, out_done);
        }
        Ok(())
    }
}
+
#[cfg(windows)]
mod imp {
    use std::io;
    use std::os::windows::prelude::*;
    use std::process::{ChildStderr, ChildStdout};
    use std::slice;

    use miow::iocp::{CompletionPort, CompletionStatus};
    use miow::pipe::NamedPipe;
    use miow::Overlapped;
    use windows_sys::Win32::Foundation::ERROR_BROKEN_PIPE;

    /// One child stream being read via overlapped (async) I/O.
    struct Pipe<'a> {
        /// Destination buffer; overlapped reads append directly into its
        /// spare capacity (see `slice_to_end`/`complete`).
        dst: &'a mut Vec<u8>,
        overlapped: Overlapped,
        pipe: NamedPipe,
        /// Set once the pipe reports EOF (broken pipe or 0-byte transfer).
        done: bool,
    }

    /// Windows counterpart of the unix `read2`: drives both streams with a
    /// single I/O completion port, invoking `data(is_stdout, buffer, eof)`
    /// as bytes arrive, until both streams are done.
    pub fn read2(
        out_pipe: ChildStdout,
        err_pipe: ChildStderr,
        data: &mut dyn FnMut(bool, &mut Vec<u8>, bool),
    ) -> io::Result<()> {
        let mut out = Vec::new();
        let mut err = Vec::new();

        // Token 0 = stdout, token 1 = stderr.
        let port = CompletionPort::new(1)?;
        port.add_handle(0, &out_pipe)?;
        port.add_handle(1, &err_pipe)?;

        unsafe {
            let mut out_pipe = Pipe::new(out_pipe, &mut out);
            let mut err_pipe = Pipe::new(err_pipe, &mut err);

            out_pipe.read()?;
            err_pipe.read()?;

            let mut status = [CompletionStatus::zero(), CompletionStatus::zero()];

            while !out_pipe.done || !err_pipe.done {
                for status in port.get_many(&mut status, None)? {
                    if status.token() == 0 {
                        out_pipe.complete(status);
                        data(true, out_pipe.dst, out_pipe.done);
                        out_pipe.read()?;
                    } else {
                        err_pipe.complete(status);
                        data(false, err_pipe.dst, err_pipe.done);
                        err_pipe.read()?;
                    }
                }
            }

            Ok(())
        }
    }

    impl<'a> Pipe<'a> {
        unsafe fn new<P: IntoRawHandle>(p: P, dst: &'a mut Vec<u8>) -> Pipe<'a> {
            Pipe {
                dst,
                pipe: NamedPipe::from_raw_handle(p.into_raw_handle()),
                overlapped: Overlapped::zero(),
                done: false,
            }
        }

        /// Starts an overlapped read into `dst`'s spare capacity. A broken
        /// pipe means the child closed its end: treat as EOF, not an error.
        unsafe fn read(&mut self) -> io::Result<()> {
            let dst = slice_to_end(self.dst);
            match self.pipe.read_overlapped(dst, self.overlapped.raw()) {
                Ok(_) => Ok(()),
                Err(e) => {
                    if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) {
                        self.done = true;
                        Ok(())
                    } else {
                        Err(e)
                    }
                }
            }
        }

        /// Commits a finished read: extends `dst`'s length by the bytes the
        /// kernel wrote; a 0-byte transfer signals EOF.
        unsafe fn complete(&mut self, status: &CompletionStatus) {
            let prev = self.dst.len();
            self.dst.set_len(prev + status.bytes_transferred() as usize);
            if status.bytes_transferred() == 0 {
                self.done = true;
            }
        }
    }

    /// Returns `v`'s uninitialized spare capacity as a writable slice,
    /// growing the vector first so the slice is never empty.
    unsafe fn slice_to_end(v: &mut Vec<u8>) -> &mut [u8] {
        if v.capacity() == 0 {
            v.reserve(16);
        }
        if v.capacity() == v.len() {
            v.reserve(1);
        }
        slice::from_raw_parts_mut(v.as_mut_ptr().add(v.len()), v.capacity() - v.len())
    }
}
diff --git a/crates/cargo-util/src/registry.rs b/crates/cargo-util/src/registry.rs
new file mode 100644
index 0000000..6b1ccd2
--- /dev/null
+++ b/crates/cargo-util/src/registry.rs
@@ -0,0 +1,45 @@
/// Make a path to a dependency, which aligns to
///
/// - [index from of Cargo's index on filesystem][1], and
/// - [index from Crates.io][2].
///
/// [1]: https://docs.rs/cargo/latest/cargo/sources/registry/index.html#the-format-of-the-index
/// [2]: https://github.com/rust-lang/crates.io-index
pub fn make_dep_path(dep_name: &str, prefix_only: bool) -> String {
    // Index layout: 1- and 2-character names live under "1" and "2",
    // 3-character names under "3/<first char>", everything else under
    // "<first two chars>/<next two chars>".
    let prefix = match dep_name.len() {
        1 => "1".to_string(),
        2 => "2".to_string(),
        3 => format!("3/{}", &dep_name[..1]),
        _ => format!("{}/{}", &dep_name[0..2], &dep_name[2..4]),
    };
    if prefix_only {
        prefix
    } else {
        format!("{}/{}", prefix, dep_name)
    }
}
+
#[cfg(test)]
mod tests {
    use super::make_dep_path;

    // With `prefix_only` set, only the directory portion is returned.
    #[test]
    fn prefix_only() {
        assert_eq!(make_dep_path("a", true), "1");
        assert_eq!(make_dep_path("ab", true), "2");
        assert_eq!(make_dep_path("abc", true), "3/a");
        assert_eq!(make_dep_path("Abc", true), "3/A");
        assert_eq!(make_dep_path("AbCd", true), "Ab/Cd");
        assert_eq!(make_dep_path("aBcDe", true), "aB/cD");
    }

    // The full form appends "/<name>" to the prefix.
    #[test]
    fn full() {
        assert_eq!(make_dep_path("a", false), "1/a");
        assert_eq!(make_dep_path("ab", false), "2/ab");
        assert_eq!(make_dep_path("abc", false), "3/a/abc");
        assert_eq!(make_dep_path("Abc", false), "3/A/Abc");
        assert_eq!(make_dep_path("AbCd", false), "Ab/Cd/AbCd");
        assert_eq!(make_dep_path("aBcDe", false), "aB/cD/aBcDe");
    }
}
diff --git a/crates/cargo-util/src/sha256.rs b/crates/cargo-util/src/sha256.rs
new file mode 100644
index 0000000..58821f4
--- /dev/null
+++ b/crates/cargo-util/src/sha256.rs
@@ -0,0 +1,56 @@
+use super::paths;
+use anyhow::{Context, Result};
+use crypto_hash::{Algorithm, Hasher};
+use std::fs::File;
+use std::io::{self, Read, Write};
+use std::path::Path;
+
+pub struct Sha256(Hasher);
+
+impl Sha256 {
+ pub fn new() -> Sha256 {
+ let hasher = Hasher::new(Algorithm::SHA256);
+ Sha256(hasher)
+ }
+
+ pub fn update(&mut self, bytes: &[u8]) -> &mut Sha256 {
+ let _ = self.0.write_all(bytes);
+ self
+ }
+
+ pub fn update_file(&mut self, mut file: &File) -> io::Result<&mut Sha256> {
+ let mut buf = [0; 64 * 1024];
+ loop {
+ let n = file.read(&mut buf)?;
+ if n == 0 {
+ break Ok(self);
+ }
+ self.update(&buf[..n]);
+ }
+ }
+
+ pub fn update_path<P: AsRef<Path>>(&mut self, path: P) -> Result<&mut Sha256> {
+ let path = path.as_ref();
+ let file = paths::open(path)?;
+ self.update_file(&file)
+ .with_context(|| format!("failed to read `{}`", path.display()))?;
+ Ok(self)
+ }
+
+ pub fn finish(&mut self) -> [u8; 32] {
+ let mut ret = [0u8; 32];
+ let data = self.0.finish();
+ ret.copy_from_slice(&data[..]);
+ ret
+ }
+
+ pub fn finish_hex(&mut self) -> String {
+ hex::encode(self.finish())
+ }
+}
+
impl Default for Sha256 {
    /// Equivalent to [`Sha256::new`].
    fn default() -> Self {
        Self::new()
    }
}
diff --git a/crates/crates-io/Cargo.toml b/crates/crates-io/Cargo.toml
new file mode 100644
index 0000000..004e2da
--- /dev/null
+++ b/crates/crates-io/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "crates-io"
+version = "0.36.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = """
+Helpers for interacting with crates.io
+"""
+
+[lib]
+name = "crates_io"
+path = "lib.rs"
+
+[dependencies]
+anyhow = "1.0.34"
+curl = "0.4"
+percent-encoding = "2.0"
+serde = { version = "1.0", features = ['derive'] }
+serde_json = "1.0"
+url = "2.0"
diff --git a/crates/crates-io/LICENSE-APACHE b/crates/crates-io/LICENSE-APACHE
new file mode 120000
index 0000000..1cd601d
--- /dev/null
+++ b/crates/crates-io/LICENSE-APACHE
@@ -0,0 +1 @@
+../../LICENSE-APACHE \ No newline at end of file
diff --git a/crates/crates-io/LICENSE-MIT b/crates/crates-io/LICENSE-MIT
new file mode 120000
index 0000000..b2cfbdc
--- /dev/null
+++ b/crates/crates-io/LICENSE-MIT
@@ -0,0 +1 @@
+../../LICENSE-MIT \ No newline at end of file
diff --git a/crates/crates-io/lib.rs b/crates/crates-io/lib.rs
new file mode 100644
index 0000000..ad3ea76
--- /dev/null
+++ b/crates/crates-io/lib.rs
@@ -0,0 +1,537 @@
+#![allow(clippy::all)]
+
+use std::collections::BTreeMap;
+use std::fmt;
+use std::fs::File;
+use std::io::prelude::*;
+use std::io::{Cursor, SeekFrom};
+use std::time::Instant;
+
+use anyhow::{bail, format_err, Context, Result};
+use curl::easy::{Easy, List};
+use percent_encoding::{percent_encode, NON_ALPHANUMERIC};
+use serde::{Deserialize, Serialize};
+use url::Url;
+
pub struct Registry {
    /// The base URL for issuing API requests.
    host: String,
    /// Optional authorization token.
    /// If None, commands requiring authorization will fail.
    token: Option<String>,
    /// Curl handle for issuing requests.
    handle: Easy,
    /// Whether to include the authorization token with all requests.
    auth_required: bool,
}

/// Whether a request must carry the authorization token.
#[derive(PartialEq, Clone, Copy)]
pub enum Auth {
    Authorized,
    Unauthorized,
}

/// A crate as returned by the registry's search endpoint.
#[derive(Deserialize)]
pub struct Crate {
    pub name: String,
    pub description: Option<String>,
    pub max_version: String,
}

/// Metadata payload sent when publishing a new crate version.
#[derive(Serialize, Deserialize)]
pub struct NewCrate {
    pub name: String,
    pub vers: String,
    pub deps: Vec<NewCrateDependency>,
    pub features: BTreeMap<String, Vec<String>>,
    pub authors: Vec<String>,
    pub description: Option<String>,
    pub documentation: Option<String>,
    pub homepage: Option<String>,
    pub readme: Option<String>,
    pub readme_file: Option<String>,
    pub keywords: Vec<String>,
    pub categories: Vec<String>,
    pub license: Option<String>,
    pub license_file: Option<String>,
    pub repository: Option<String>,
    pub badges: BTreeMap<String, BTreeMap<String, String>>,
    pub links: Option<String>,
}

/// One dependency entry inside a [`NewCrate`] publish payload.
#[derive(Serialize, Deserialize)]
pub struct NewCrateDependency {
    pub optional: bool,
    pub default_features: bool,
    pub name: String,
    pub features: Vec<String>,
    pub version_req: String,
    pub target: Option<String>,
    pub kind: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub registry: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub explicit_name_in_toml: Option<String>,
}

/// A registry user, as returned by the owners endpoints.
#[derive(Deserialize)]
pub struct User {
    pub id: u32,
    pub login: String,
    pub avatar: Option<String>,
    pub email: Option<String>,
    pub name: Option<String>,
}

/// Non-fatal warnings reported by the registry after a publish.
pub struct Warnings {
    pub invalid_categories: Vec<String>,
    pub invalid_badges: Vec<String>,
    pub other: Vec<String>,
}

// Private response shapes used only to deserialize registry replies.

#[derive(Deserialize)]
struct R {
    ok: bool,
}
#[derive(Deserialize)]
struct OwnerResponse {
    ok: bool,
    msg: String,
}
#[derive(Deserialize)]
struct ApiErrorList {
    errors: Vec<ApiError>,
}
#[derive(Deserialize)]
struct ApiError {
    detail: String,
}
#[derive(Serialize)]
struct OwnersReq<'a> {
    users: &'a [&'a str],
}
#[derive(Deserialize)]
struct Users {
    users: Vec<User>,
}
#[derive(Deserialize)]
struct TotalCrates {
    total: u32,
}
#[derive(Deserialize)]
struct Crates {
    crates: Vec<Crate>,
    meta: TotalCrates,
}
+
/// Error produced while talking to a registry.
#[derive(Debug)]
pub enum ResponseError {
    /// Transport-level error reported by curl.
    Curl(curl::Error),
    /// The server replied with a structured `errors` JSON payload.
    Api {
        code: u32,
        errors: Vec<String>,
    },
    /// Non-success response without a recognizable error payload.
    Code {
        code: u32,
        headers: Vec<String>,
        body: String,
    },
    /// Any other failure (e.g. a response body that was not valid UTF-8).
    Other(anyhow::Error),
}
+
+impl std::error::Error for ResponseError {
+ fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+ match self {
+ ResponseError::Curl(..) => None,
+ ResponseError::Api { .. } => None,
+ ResponseError::Code { .. } => None,
+ ResponseError::Other(e) => Some(e.as_ref()),
+ }
+ }
+}
+
impl fmt::Display for ResponseError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ResponseError::Curl(e) => write!(f, "{}", e),
            ResponseError::Api { code, errors } => {
                f.write_str("the remote server responded with an error")?;
                // A 200 response can still carry an `errors` payload; only
                // annotate the status line for non-200 codes.
                if *code != 200 {
                    write!(f, " (status {} {})", code, reason(*code))?;
                };
                write!(f, ": {}", errors.join(", "))
            }
            ResponseError::Code {
                code,
                headers,
                body,
            } => write!(
                f,
                "failed to get a 200 OK response, got {}\n\
                 headers:\n\
                 \t{}\n\
                 body:\n\
                 {}",
                code,
                headers.join("\n\t"),
                body
            ),
            ResponseError::Other(..) => write!(f, "invalid response from server"),
        }
    }
}
+
impl From<curl::Error> for ResponseError {
    /// Wraps a transport-level curl error.
    fn from(error: curl::Error) -> Self {
        ResponseError::Curl(error)
    }
}
+
+impl Registry {
+ /// Creates a new `Registry`.
+ ///
+ /// ## Example
+ ///
+ /// ```rust
+ /// use curl::easy::Easy;
+ /// use crates_io::Registry;
+ ///
+ /// let mut handle = Easy::new();
+ /// // If connecting to crates.io, a user-agent is required.
+ /// handle.useragent("my_crawler (example.com/info)");
+ /// let mut reg = Registry::new_handle(String::from("https://crates.io"), None, handle);
+ /// ```
+ pub fn new_handle(
+ host: String,
+ token: Option<String>,
+ handle: Easy,
+ auth_required: bool,
+ ) -> Registry {
+ Registry {
+ host,
+ token,
+ handle,
+ auth_required,
+ }
+ }
+
+ pub fn set_token(&mut self, token: Option<String>) {
+ self.token = token;
+ }
+
+ pub fn host(&self) -> &str {
+ &self.host
+ }
+
+ pub fn host_is_crates_io(&self) -> bool {
+ is_url_crates_io(&self.host)
+ }
+
+ pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result<String> {
+ let body = serde_json::to_string(&OwnersReq { users: owners })?;
+ let body = self.put(&format!("/crates/{}/owners", krate), body.as_bytes())?;
+ assert!(serde_json::from_str::<OwnerResponse>(&body)?.ok);
+ Ok(serde_json::from_str::<OwnerResponse>(&body)?.msg)
+ }
+
+ pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> {
+ let body = serde_json::to_string(&OwnersReq { users: owners })?;
+ let body = self.delete(&format!("/crates/{}/owners", krate), Some(body.as_bytes()))?;
+ assert!(serde_json::from_str::<OwnerResponse>(&body)?.ok);
+ Ok(())
+ }
+
+ pub fn list_owners(&mut self, krate: &str) -> Result<Vec<User>> {
+ let body = self.get(&format!("/crates/{}/owners", krate))?;
+ Ok(serde_json::from_str::<Users>(&body)?.users)
+ }
+
+ pub fn publish(&mut self, krate: &NewCrate, mut tarball: &File) -> Result<Warnings> {
+ let json = serde_json::to_string(krate)?;
+ // Prepare the body. The format of the upload request is:
+ //
+ // <le u32 of json>
+ // <json request> (metadata for the package)
+ // <le u32 of tarball>
+ // <source tarball>
+
+ // NOTE: This can be replaced with `stream_len` if it is ever stabilized.
+ //
+ // This checks the length using seeking instead of metadata, because
+ // on some filesystems, getting the metadata will fail because
+ // the file was renamed in ops::package.
+ let tarball_len = tarball
+ .seek(SeekFrom::End(0))
+ .with_context(|| "failed to seek tarball")?;
+ tarball
+ .seek(SeekFrom::Start(0))
+ .with_context(|| "failed to seek tarball")?;
+ let header = {
+ let mut w = Vec::new();
+ w.extend(&(json.len() as u32).to_le_bytes());
+ w.extend(json.as_bytes().iter().cloned());
+ w.extend(&(tarball_len as u32).to_le_bytes());
+ w
+ };
+ let size = tarball_len as usize + header.len();
+ let mut body = Cursor::new(header).chain(tarball);
+
+ let url = format!("{}/api/v1/crates/new", self.host);
+
+ let token = match self.token.as_ref() {
+ Some(s) => s,
+ None => bail!("no upload token found, please run `cargo login`"),
+ };
+ self.handle.put(true)?;
+ self.handle.url(&url)?;
+ self.handle.in_filesize(size as u64)?;
+ let mut headers = List::new();
+ headers.append("Accept: application/json")?;
+ headers.append(&format!("Authorization: {}", token))?;
+ self.handle.http_headers(headers)?;
+
+ let started = Instant::now();
+ let body = self
+ .handle(&mut |buf| body.read(buf).unwrap_or(0))
+ .map_err(|e| match e {
+ ResponseError::Code { code, .. }
+ if code == 503
+ && started.elapsed().as_secs() >= 29
+ && self.host_is_crates_io() =>
+ {
+ format_err!(
+ "Request timed out after 30 seconds. If you're trying to \
+ upload a crate it may be too large. If the crate is under \
+ 10MB in size, you can email help@crates.io for assistance.\n\
+ Total size was {}.",
+ tarball_len
+ )
+ }
+ _ => e.into(),
+ })?;
+
+ let response = if body.is_empty() {
+ "{}".parse()?
+ } else {
+ body.parse::<serde_json::Value>()?
+ };
+
+ let invalid_categories: Vec<String> = response
+ .get("warnings")
+ .and_then(|j| j.get("invalid_categories"))
+ .and_then(|j| j.as_array())
+ .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect())
+ .unwrap_or_else(Vec::new);
+
+ let invalid_badges: Vec<String> = response
+ .get("warnings")
+ .and_then(|j| j.get("invalid_badges"))
+ .and_then(|j| j.as_array())
+ .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect())
+ .unwrap_or_else(Vec::new);
+
+ let other: Vec<String> = response
+ .get("warnings")
+ .and_then(|j| j.get("other"))
+ .and_then(|j| j.as_array())
+ .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect())
+ .unwrap_or_else(Vec::new);
+
+ Ok(Warnings {
+ invalid_categories,
+ invalid_badges,
+ other,
+ })
+ }
+
+ pub fn search(&mut self, query: &str, limit: u32) -> Result<(Vec<Crate>, u32)> {
+ let formatted_query = percent_encode(query.as_bytes(), NON_ALPHANUMERIC);
+ let body = self.req(
+ &format!("/crates?q={}&per_page={}", formatted_query, limit),
+ None,
+ Auth::Unauthorized,
+ )?;
+
+ let crates = serde_json::from_str::<Crates>(&body)?;
+ Ok((crates.crates, crates.meta.total))
+ }
+
+ pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> {
+ let body = self.delete(&format!("/crates/{}/{}/yank", krate, version), None)?;
+ assert!(serde_json::from_str::<R>(&body)?.ok);
+ Ok(())
+ }
+
+ pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> {
+ let body = self.put(&format!("/crates/{}/{}/unyank", krate, version), &[])?;
+ assert!(serde_json::from_str::<R>(&body)?.ok);
+ Ok(())
+ }
+
+ fn put(&mut self, path: &str, b: &[u8]) -> Result<String> {
+ self.handle.put(true)?;
+ self.req(path, Some(b), Auth::Authorized)
+ }
+
+ fn get(&mut self, path: &str) -> Result<String> {
+ self.handle.get(true)?;
+ self.req(path, None, Auth::Authorized)
+ }
+
+ fn delete(&mut self, path: &str, b: Option<&[u8]>) -> Result<String> {
+ self.handle.custom_request("DELETE")?;
+ self.req(path, b, Auth::Authorized)
+ }
+
+ fn req(&mut self, path: &str, body: Option<&[u8]>, authorized: Auth) -> Result<String> {
+ self.handle.url(&format!("{}/api/v1{}", self.host, path))?;
+ let mut headers = List::new();
+ headers.append("Accept: application/json")?;
+ headers.append("Content-Type: application/json")?;
+
+ if self.auth_required || authorized == Auth::Authorized {
+ let token = match self.token.as_ref() {
+ Some(s) => s,
+ None => bail!("no upload token found, please run `cargo login`"),
+ };
+ check_token(token)?;
+ headers.append(&format!("Authorization: {}", token))?;
+ }
+ self.handle.http_headers(headers)?;
+ match body {
+ Some(mut body) => {
+ self.handle.upload(true)?;
+ self.handle.in_filesize(body.len() as u64)?;
+ self.handle(&mut |buf| body.read(buf).unwrap_or(0))
+ .map_err(|e| e.into())
+ }
+ None => self.handle(&mut |_| 0).map_err(|e| e.into()),
+ }
+ }
+
+ fn handle(
+ &mut self,
+ read: &mut dyn FnMut(&mut [u8]) -> usize,
+ ) -> std::result::Result<String, ResponseError> {
+ let mut headers = Vec::new();
+ let mut body = Vec::new();
+ {
+ let mut handle = self.handle.transfer();
+ handle.read_function(|buf| Ok(read(buf)))?;
+ handle.write_function(|data| {
+ body.extend_from_slice(data);
+ Ok(data.len())
+ })?;
+ handle.header_function(|data| {
+ // Headers contain trailing \r\n, trim them to make it easier
+ // to work with.
+ let s = String::from_utf8_lossy(data).trim().to_string();
+ headers.push(s);
+ true
+ })?;
+ handle.perform()?;
+ }
+
+ let body = match String::from_utf8(body) {
+ Ok(body) => body,
+ Err(..) => {
+ return Err(ResponseError::Other(format_err!(
+ "response body was not valid utf-8"
+ )))
+ }
+ };
+ let errors = serde_json::from_str::<ApiErrorList>(&body)
+ .ok()
+ .map(|s| s.errors.into_iter().map(|s| s.detail).collect::<Vec<_>>());
+
+ match (self.handle.response_code()?, errors) {
+ (0, None) | (200, None) => Ok(body),
+ (code, Some(errors)) => Err(ResponseError::Api { code, errors }),
+ (code, None) => Err(ResponseError::Code {
+ code,
+ headers,
+ body,
+ }),
+ }
+ }
+}
+
/// Returns the standard reason phrase for an HTTP status `code`, or
/// `"<unknown>"` for codes not in the table.
fn reason(code: u32) -> &'static str {
    // Taken from https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
    match code {
        100 => "Continue",
        101 => "Switching Protocol",
        103 => "Early Hints",
        200 => "OK",
        201 => "Created",
        202 => "Accepted",
        203 => "Non-Authoritative Information",
        204 => "No Content",
        205 => "Reset Content",
        206 => "Partial Content",
        300 => "Multiple Choice",
        301 => "Moved Permanently",
        302 => "Found",
        303 => "See Other",
        304 => "Not Modified",
        307 => "Temporary Redirect",
        308 => "Permanent Redirect",
        400 => "Bad Request",
        401 => "Unauthorized",
        402 => "Payment Required",
        403 => "Forbidden",
        404 => "Not Found",
        405 => "Method Not Allowed",
        406 => "Not Acceptable",
        407 => "Proxy Authentication Required",
        408 => "Request Timeout",
        409 => "Conflict",
        410 => "Gone",
        411 => "Length Required",
        412 => "Precondition Failed",
        413 => "Payload Too Large",
        414 => "URI Too Long",
        415 => "Unsupported Media Type",
        416 => "Request Range Not Satisfiable",
        417 => "Expectation Failed",
        429 => "Too Many Requests",
        431 => "Request Header Fields Too Large",
        500 => "Internal Server Error",
        501 => "Not Implemented",
        502 => "Bad Gateway",
        503 => "Service Unavailable",
        504 => "Gateway Timeout",
        _ => "<unknown>",
    }
}
+
+/// Returns `true` if the host of the given URL is "crates.io".
+pub fn is_url_crates_io(url: &str) -> bool {
+ Url::parse(url)
+ .map(|u| u.host_str() == Some("crates.io"))
+ .unwrap_or(false)
+}
+
+/// Checks if a token is valid or malformed.
+///
+/// This check is necessary to prevent sending tokens which create an invalid HTTP request.
+/// It would be easier to check just for alphanumeric tokens, but we can't be sure that all
+/// registries only create tokens in that format so that is as less restricted as possible.
+pub fn check_token(token: &str) -> Result<()> {
+ if token.is_empty() {
+ bail!("please provide a non-empty token");
+ }
+ if token.bytes().all(|b| {
+ b >= 32 // undefined in ISO-8859-1, in ASCII/ UTF-8 not-printable character
+ && b < 128 // utf-8: the first bit signals a multi-byte character
+ && b != 127 // 127 is a control character in ascii and not in ISO 8859-1
+ || b == b't' // tab is also allowed (even when < 32)
+ }) {
+ Ok(())
+ } else {
+ Err(anyhow::anyhow!(
+ "token contains invalid characters.\nOnly printable ISO-8859-1 characters \
+ are allowed as it is sent in a HTTPS header."
+ ))
+ }
+}
diff --git a/crates/credential/README.md b/crates/credential/README.md
new file mode 100644
index 0000000..168cc71
--- /dev/null
+++ b/crates/credential/README.md
@@ -0,0 +1,8 @@
+# Cargo Credential Packages
+
+This directory contains Cargo packages for handling storage of tokens in a
+secure manner.
+
+`cargo-credential` is a generic library to assist writing a credential
+process. The other directories contain implementations that integrate with
+specific credential systems.
diff --git a/crates/credential/cargo-credential-1password/Cargo.toml b/crates/credential/cargo-credential-1password/Cargo.toml
new file mode 100644
index 0000000..093fde8
--- /dev/null
+++ b/crates/credential/cargo-credential-1password/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "cargo-credential-1password"
+version = "0.2.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = "A Cargo credential process that stores tokens in a 1password vault."
+
+[dependencies]
+cargo-credential = { version = "0.2.0", path = "../cargo-credential" }
+serde = { version = "1.0.117", features = ["derive"] }
+serde_json = "1.0.59"
diff --git a/crates/credential/cargo-credential-1password/src/main.rs b/crates/credential/cargo-credential-1password/src/main.rs
new file mode 100644
index 0000000..4f512b7
--- /dev/null
+++ b/crates/credential/cargo-credential-1password/src/main.rs
@@ -0,0 +1,314 @@
+//! Cargo registry 1password credential process.
+
+use cargo_credential::{Credential, Error};
+use serde::Deserialize;
+use std::io::Read;
+use std::process::{Command, Stdio};
+
/// Tag attached to 1password items so Cargo-managed tokens can be located.
const CARGO_TAG: &str = "cargo-registry";

/// Implementation of 1password keychain access for Cargo registries.
struct OnePasswordKeychain {
    // Value forwarded to `op --account`, if the user specified one.
    account: Option<String>,
    // Value forwarded to `op --vault`, if the user specified one.
    vault: Option<String>,
}
+
/// 1password Login item type, used for the JSON output of `op item get`.
#[derive(Deserialize)]
struct Login {
    fields: Vec<Field>,
}

/// One field of a Login item; only the field with id `password` is consulted.
#[derive(Deserialize)]
struct Field {
    id: String,
    value: Option<String>,
}

/// 1password item from `op items list`.
#[derive(Deserialize)]
struct ListItem {
    id: String,
    urls: Vec<Url>,
}

/// A URL attached to an item; matched against the registry index URL.
#[derive(Deserialize)]
struct Url {
    href: String,
}
+
impl OnePasswordKeychain {
    /// Parses the process arguments, extracting the optional `--account` and
    /// `--vault` options that are forwarded to every `op` invocation.
    fn new() -> Result<OnePasswordKeychain, Error> {
        let mut args = std::env::args().skip(1);
        let mut action = false;
        let mut account = None;
        let mut vault = None;
        while let Some(arg) = args.next() {
            match arg.as_str() {
                "--account" => {
                    account = Some(args.next().ok_or("--account needs an arg")?);
                }
                "--vault" => {
                    vault = Some(args.next().ok_or("--vault needs an arg")?);
                }
                s if s.starts_with('-') => {
                    return Err(format!("unknown option {}", s).into());
                }
                _ => {
                    // At most one positional (action) argument is accepted.
                    if action {
                        return Err("too many arguments".into());
                    } else {
                        action = true;
                    }
                }
            }
        }
        Ok(OnePasswordKeychain { account, vault })
    }

    /// Signs in via `op signin --raw`, returning the session token to pass
    /// to later commands, or `None` when a session env var already exists or
    /// CLI integration produced no output.
    fn signin(&self) -> Result<Option<String>, Error> {
        // If there are any session env vars, we'll assume that this is the
        // correct account, and that the user knows what they are doing.
        if std::env::vars().any(|(name, _)| name.starts_with("OP_SESSION_")) {
            return Ok(None);
        }
        let mut cmd = Command::new("op");
        cmd.args(&["signin", "--raw"]);
        cmd.stdout(Stdio::piped());
        // `op` prompts for the master password, so it needs a real tty.
        self.with_tty(&mut cmd)?;
        let mut child = cmd
            .spawn()
            .map_err(|e| format!("failed to spawn `op`: {}", e))?;
        let mut buffer = String::new();
        child
            .stdout
            .as_mut()
            .unwrap()
            .read_to_string(&mut buffer)
            .map_err(|e| format!("failed to get session from `op`: {}", e))?;
        // Keep only the first line of output; it holds the raw session token.
        if let Some(end) = buffer.find('\n') {
            buffer.truncate(end);
        }
        let status = child
            .wait()
            .map_err(|e| format!("failed to wait for `op`: {}", e))?;
        if !status.success() {
            return Err(format!("failed to run `op signin`: {}", status).into());
        }
        if buffer.is_empty() {
            // When using CLI integration, `op signin` returns no output,
            // so there is no need to set the session.
            return Ok(None);
        }
        Ok(Some(buffer))
    }

    /// Builds an `op` command with the given args plus any configured
    /// account, vault, and session flags.
    fn make_cmd(&self, session: &Option<String>, args: &[&str]) -> Command {
        let mut cmd = Command::new("op");
        cmd.args(args);
        if let Some(account) = &self.account {
            cmd.arg("--account");
            cmd.arg(account);
        }
        if let Some(vault) = &self.vault {
            cmd.arg("--vault");
            cmd.arg(vault);
        }
        if let Some(session) = session {
            cmd.arg("--session");
            cmd.arg(session);
        }
        cmd
    }

    /// Attaches the controlling terminal to `cmd`'s stdin so `op` can prompt
    /// the user interactively.
    fn with_tty(&self, cmd: &mut Command) -> Result<(), Error> {
        #[cfg(unix)]
        const IN_DEVICE: &str = "/dev/tty";
        #[cfg(windows)]
        const IN_DEVICE: &str = "CONIN$";
        let stdin = std::fs::OpenOptions::new()
            .read(true)
            .write(true)
            .open(IN_DEVICE)?;
        cmd.stdin(stdin);
        Ok(())
    }

    /// Runs `cmd`, returning its captured stdout, or an error if it exits
    /// unsuccessfully.
    fn run_cmd(&self, mut cmd: Command) -> Result<String, Error> {
        cmd.stdout(Stdio::piped());
        let mut child = cmd
            .spawn()
            .map_err(|e| format!("failed to spawn `op`: {}", e))?;
        let mut buffer = String::new();
        child
            .stdout
            .as_mut()
            .unwrap()
            .read_to_string(&mut buffer)
            .map_err(|e| format!("failed to read `op` output: {}", e))?;
        let status = child
            .wait()
            .map_err(|e| format!("failed to wait for `op`: {}", e))?;
        if !status.success() {
            return Err(format!("`op` command exit error: {}", status).into());
        }
        Ok(buffer)
    }

    /// Finds the ID of the single Cargo-tagged Login item whose URL matches
    /// `index_url`; errors when more than one item matches.
    fn search(&self, session: &Option<String>, index_url: &str) -> Result<Option<String>, Error> {
        let cmd = self.make_cmd(
            session,
            &[
                "items",
                "list",
                "--categories",
                "Login",
                "--tags",
                CARGO_TAG,
                "--format",
                "json",
            ],
        );
        let buffer = self.run_cmd(cmd)?;
        let items: Vec<ListItem> = serde_json::from_str(&buffer)
            .map_err(|e| format!("failed to deserialize JSON from 1password list: {}", e))?;
        let mut matches = items
            .into_iter()
            .filter(|item| item.urls.iter().any(|url| url.href == index_url));
        match matches.next() {
            Some(login) => {
                // Should this maybe just sort on `updatedAt` and return the newest one?
                if matches.next().is_some() {
                    return Err(format!(
                        "too many 1password logins match registry `{}`, \
                         consider deleting the excess entries",
                        index_url
                    )
                    .into());
                }
                Ok(Some(login.id))
            }
            None => Ok(None),
        }
    }

    /// Replaces the password stored on an existing item.
    fn modify(
        &self,
        session: &Option<String>,
        id: &str,
        token: &str,
        _name: Option<&str>,
    ) -> Result<(), Error> {
        let cmd = self.make_cmd(
            session,
            &["item", "edit", id, &format!("password={}", token)],
        );
        self.run_cmd(cmd)?;
        Ok(())
    }

    /// Creates a new Cargo-tagged Login item holding `token` for `index_url`.
    fn create(
        &self,
        session: &Option<String>,
        index_url: &str,
        token: &str,
        name: Option<&str>,
    ) -> Result<(), Error> {
        let title = match name {
            Some(name) => format!("Cargo registry token for {}", name),
            None => "Cargo registry token".to_string(),
        };
        let mut cmd = self.make_cmd(
            session,
            &[
                "item",
                "create",
                "--category",
                "Login",
                &format!("password={}", token),
                &format!("url={}", index_url),
                "--title",
                &title,
                "--tags",
                CARGO_TAG,
            ],
        );
        // For unknown reasons, `op item create` seems to not be happy if
        // stdin is not a tty. Otherwise it returns with a 0 exit code without
        // doing anything.
        self.with_tty(&mut cmd)?;
        self.run_cmd(cmd)?;
        Ok(())
    }

    /// Fetches the password field of the item with the given `id`.
    fn get_token(&self, session: &Option<String>, id: &str) -> Result<String, Error> {
        let cmd = self.make_cmd(session, &["item", "get", "--format=json", id]);
        let buffer = self.run_cmd(cmd)?;
        let item: Login = serde_json::from_str(&buffer)
            .map_err(|e| format!("failed to deserialize JSON from 1password get: {}", e))?;
        let password = item.fields.into_iter().find(|item| item.id == "password");
        match password {
            Some(password) => password
                .value
                .ok_or_else(|| format!("missing password value for entry").into()),
            None => Err("could not find password field".into()),
        }
    }

    /// Deletes the item with the given `id`.
    fn delete(&self, session: &Option<String>, id: &str) -> Result<(), Error> {
        let cmd = self.make_cmd(session, &["item", "delete", id]);
        self.run_cmd(cmd)?;
        Ok(())
    }
}
+
+impl Credential for OnePasswordKeychain {
+ fn name(&self) -> &'static str {
+ env!("CARGO_PKG_NAME")
+ }
+
+ fn get(&self, index_url: &str) -> Result<String, Error> {
+ let session = self.signin()?;
+ if let Some(id) = self.search(&session, index_url)? {
+ self.get_token(&session, &id)
+ } else {
+ return Err(format!(
+ "no 1password entry found for registry `{}`, try `cargo login` to add a token",
+ index_url
+ )
+ .into());
+ }
+ }
+
+ fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error> {
+ let session = self.signin()?;
+ // Check if an item already exists.
+ if let Some(id) = self.search(&session, index_url)? {
+ self.modify(&session, &id, token, name)
+ } else {
+ self.create(&session, index_url, token, name)
+ }
+ }
+
+ fn erase(&self, index_url: &str) -> Result<(), Error> {
+ let session = self.signin()?;
+ // Check if an item already exists.
+ if let Some(id) = self.search(&session, index_url)? {
+ self.delete(&session, &id)?;
+ } else {
+ eprintln!("not currently logged in to `{}`", index_url);
+ }
+ Ok(())
+ }
+}
+
+fn main() {
+ let op = match OnePasswordKeychain::new() {
+ Ok(op) => op,
+ Err(e) => {
+ eprintln!("error: {}", e);
+ std::process::exit(1);
+ }
+ };
+ cargo_credential::main(op);
+}
diff --git a/crates/credential/cargo-credential-gnome-secret/Cargo.toml b/crates/credential/cargo-credential-gnome-secret/Cargo.toml
new file mode 100644
index 0000000..12e25cf
--- /dev/null
+++ b/crates/credential/cargo-credential-gnome-secret/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "cargo-credential-gnome-secret"
+version = "0.2.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = "A Cargo credential process that stores tokens with GNOME libsecret."
+
+[dependencies]
+cargo-credential = { version = "0.2.0", path = "../cargo-credential" }
+
+[build-dependencies]
+pkg-config = "0.3.19"
diff --git a/crates/credential/cargo-credential-gnome-secret/build.rs b/crates/credential/cargo-credential-gnome-secret/build.rs
new file mode 100644
index 0000000..9283535
--- /dev/null
+++ b/crates/credential/cargo-credential-gnome-secret/build.rs
@@ -0,0 +1,3 @@
fn main() {
    // Fail the build early with a clear error when the libsecret
    // development files are not installed.
    pkg_config::probe_library("libsecret-1").unwrap();
}
diff --git a/crates/credential/cargo-credential-gnome-secret/src/main.rs b/crates/credential/cargo-credential-gnome-secret/src/main.rs
new file mode 100644
index 0000000..40972b0
--- /dev/null
+++ b/crates/credential/cargo-credential-gnome-secret/src/main.rs
@@ -0,0 +1,194 @@
+//! Cargo registry gnome libsecret credential process.
+
+use cargo_credential::{Credential, Error};
+use std::ffi::{CStr, CString};
+use std::os::raw::{c_char, c_int};
+use std::ptr::{null, null_mut};
+
// Minimal glib/libsecret FFI type definitions, mirroring the C declarations
// used by the calls below.

#[allow(non_camel_case_types)]
type gchar = c_char;

#[allow(non_camel_case_types)]
type gboolean = c_int;

// glib integer id; only appears here as the `domain` of a GError.
type GQuark = u32;

#[repr(C)]
struct GError {
    domain: GQuark,
    code: c_int,
    // C string owned by glib; read via `CStr::from_ptr` at call sites.
    message: *mut gchar,
}

// Opaque handle; only ever passed as a null pointer in this file.
#[repr(C)]
struct GCancellable {
    _private: [u8; 0],
}

#[repr(C)]
struct SecretSchema {
    name: *const gchar,
    flags: SecretSchemaFlags,
    // Fixed-size attribute table; unused slots keep a null name.
    attributes: [SecretSchemaAttribute; 32],
}

#[repr(C)]
#[derive(Copy, Clone)]
struct SecretSchemaAttribute {
    name: *const gchar,
    attr_type: SecretSchemaAttributeType,
}

#[repr(C)]
enum SecretSchemaFlags {
    None = 0,
}

#[repr(C)]
#[derive(Copy, Clone)]
enum SecretSchemaAttributeType {
    String = 0,
}
+
// Bindings for the subset of the libsecret password API used here. After the
// fixed arguments, each function takes a variadic list of attribute
// name/value pairs terminated by a null pointer (see the call sites below).
extern "C" {
    fn secret_password_store_sync(
        schema: *const SecretSchema,
        collection: *const gchar,
        label: *const gchar,
        password: *const gchar,
        cancellable: *mut GCancellable,
        error: *mut *mut GError,
        ...
    ) -> gboolean;
    fn secret_password_clear_sync(
        schema: *const SecretSchema,
        cancellable: *mut GCancellable,
        error: *mut *mut GError,
        ...
    ) -> gboolean;
    fn secret_password_lookup_sync(
        schema: *const SecretSchema,
        cancellable: *mut GCancellable,
        error: *mut *mut GError,
        ...
    ) -> *mut gchar;
}
+
/// Credential implementation backed by GNOME libsecret.
struct GnomeSecret;

/// Builds the human-readable keychain label for a registry entry.
fn label(index_url: &str) -> CString {
    let text = format!("cargo-registry:{}", index_url);
    CString::new(text).unwrap()
}
+
+fn schema() -> SecretSchema {
+ let mut attributes = [SecretSchemaAttribute {
+ name: null(),
+ attr_type: SecretSchemaAttributeType::String,
+ }; 32];
+ attributes[0] = SecretSchemaAttribute {
+ name: b"url\0".as_ptr() as *const gchar,
+ attr_type: SecretSchemaAttributeType::String,
+ };
+ SecretSchema {
+ name: b"org.rust-lang.cargo.registry\0".as_ptr() as *const gchar,
+ flags: SecretSchemaFlags::None,
+ attributes,
+ }
+}
+
impl Credential for GnomeSecret {
    /// The credential-process name, taken from the package name.
    fn name(&self) -> &'static str {
        env!("CARGO_PKG_NAME")
    }

    /// Looks up the token stored for `index_url` via libsecret.
    fn get(&self, index_url: &str) -> Result<String, Error> {
        let mut error: *mut GError = null_mut();
        let attr_url = CString::new("url").unwrap();
        let index_url_c = CString::new(index_url).unwrap();
        let schema = schema();
        unsafe {
            // Variadic attribute list: ("url", index_url), null-terminated.
            let token_c = secret_password_lookup_sync(
                &schema,
                null_mut(),
                &mut error,
                attr_url.as_ptr(),
                index_url_c.as_ptr(),
                null() as *const gchar,
            );
            if !error.is_null() {
                return Err(format!(
                    "failed to get token: {}",
                    CStr::from_ptr((*error).message).to_str()?
                )
                .into());
            }
            // A null result with no error means no matching entry exists.
            if token_c.is_null() {
                return Err(format!("cannot find token for {}", index_url).into());
            }
            let token = CStr::from_ptr(token_c)
                .to_str()
                .map_err(|e| format!("expected utf8 token: {}", e))?
                .to_string();
            Ok(token)
        }
    }

    /// Stores `token` for `index_url` in the default collection, labeled for
    /// display with `name` (falling back to the URL).
    fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error> {
        let label = label(name.unwrap_or(index_url));
        let token = CString::new(token).unwrap();
        let mut error: *mut GError = null_mut();
        let attr_url = CString::new("url").unwrap();
        let index_url_c = CString::new(index_url).unwrap();
        let schema = schema();
        unsafe {
            secret_password_store_sync(
                &schema,
                b"default\0".as_ptr() as *const gchar,
                label.as_ptr(),
                token.as_ptr(),
                null_mut(),
                &mut error,
                attr_url.as_ptr(),
                index_url_c.as_ptr(),
                null() as *const gchar,
            );
            if !error.is_null() {
                return Err(format!(
                    "failed to store token: {}",
                    CStr::from_ptr((*error).message).to_str()?
                )
                .into());
            }
        }
        Ok(())
    }

    /// Removes the stored token for `index_url`, if any.
    fn erase(&self, index_url: &str) -> Result<(), Error> {
        let schema = schema();
        let mut error: *mut GError = null_mut();
        let attr_url = CString::new("url").unwrap();
        let index_url_c = CString::new(index_url).unwrap();
        unsafe {
            secret_password_clear_sync(
                &schema,
                null_mut(),
                &mut error,
                attr_url.as_ptr(),
                index_url_c.as_ptr(),
                null() as *const gchar,
            );
            if !error.is_null() {
                return Err(format!(
                    "failed to erase token: {}",
                    CStr::from_ptr((*error).message).to_str()?
                )
                .into());
            }
        }
        Ok(())
    }
}
+
fn main() {
    // Delegate argument handling and the get/store/erase protocol to the
    // shared credential-process driver.
    cargo_credential::main(GnomeSecret);
}
diff --git a/crates/credential/cargo-credential-macos-keychain/Cargo.toml b/crates/credential/cargo-credential-macos-keychain/Cargo.toml
new file mode 100644
index 0000000..c2c22a4
--- /dev/null
+++ b/crates/credential/cargo-credential-macos-keychain/Cargo.toml
@@ -0,0 +1,11 @@
+[package]
+name = "cargo-credential-macos-keychain"
+version = "0.2.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = "A Cargo credential process that stores tokens in a macOS keychain."
+
+[dependencies]
+cargo-credential = { version = "0.2.0", path = "../cargo-credential" }
+security-framework = "2.0.0"
diff --git a/crates/credential/cargo-credential-macos-keychain/src/main.rs b/crates/credential/cargo-credential-macos-keychain/src/main.rs
new file mode 100644
index 0000000..3fef3f9
--- /dev/null
+++ b/crates/credential/cargo-credential-macos-keychain/src/main.rs
@@ -0,0 +1,50 @@
+//! Cargo registry macos keychain credential process.
+
+use cargo_credential::{Credential, Error};
+use security_framework::os::macos::keychain::SecKeychain;
+
+struct MacKeychain;
+
+/// The account name is not used.
+const ACCOUNT: &'static str = "";
+
+fn registry(registry_name: &str) -> String {
+ format!("cargo-registry:{}", registry_name)
+}
+
+impl Credential for MacKeychain {
+ fn name(&self) -> &'static str {
+ env!("CARGO_PKG_NAME")
+ }
+
+ fn get(&self, index_url: &str) -> Result<String, Error> {
+ let keychain = SecKeychain::default().unwrap();
+ let service_name = registry(index_url);
+ let (pass, _item) = keychain.find_generic_password(&service_name, ACCOUNT)?;
+ String::from_utf8(pass.as_ref().to_vec())
+ .map_err(|_| "failed to convert token to UTF8".into())
+ }
+
+ fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error> {
+ let keychain = SecKeychain::default().unwrap();
+ let service_name = registry(name.unwrap_or(index_url));
+ if let Ok((_pass, mut item)) = keychain.find_generic_password(&service_name, ACCOUNT) {
+ item.set_password(token.as_bytes())?;
+ } else {
+ keychain.add_generic_password(&service_name, ACCOUNT, token.as_bytes())?;
+ }
+ Ok(())
+ }
+
+ fn erase(&self, index_url: &str) -> Result<(), Error> {
+ let keychain = SecKeychain::default().unwrap();
+ let service_name = registry(index_url);
+ let (_pass, item) = keychain.find_generic_password(&service_name, ACCOUNT)?;
+ item.delete();
+ Ok(())
+ }
+}
+
+fn main() {
+ cargo_credential::main(MacKeychain);
+}
diff --git a/crates/credential/cargo-credential-wincred/Cargo.toml b/crates/credential/cargo-credential-wincred/Cargo.toml
new file mode 100644
index 0000000..83c38e8
--- /dev/null
+++ b/crates/credential/cargo-credential-wincred/Cargo.toml
@@ -0,0 +1,11 @@
+[package]
+name = "cargo-credential-wincred"
+version = "0.2.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = "A Cargo credential process that stores tokens with Windows Credential Manager."
+
+[dependencies]
+cargo-credential = { version = "0.2.0", path = "../cargo-credential" }
+windows-sys = { version = "0.45", features = ["Win32_Foundation", "Win32_Security_Credentials"] }
diff --git a/crates/credential/cargo-credential-wincred/src/main.rs b/crates/credential/cargo-credential-wincred/src/main.rs
new file mode 100644
index 0000000..8ae48f3
--- /dev/null
+++ b/crates/credential/cargo-credential-wincred/src/main.rs
@@ -0,0 +1,111 @@
+//! Cargo registry windows credential process.
+
+use cargo_credential::{Credential, Error};
+use std::ffi::OsStr;
+use std::os::windows::ffi::OsStrExt;
+
+use windows_sys::core::PWSTR;
+use windows_sys::Win32::Foundation::ERROR_NOT_FOUND;
+use windows_sys::Win32::Foundation::FILETIME;
+use windows_sys::Win32::Foundation::TRUE;
+use windows_sys::Win32::Security::Credentials::CredDeleteW;
+use windows_sys::Win32::Security::Credentials::CredReadW;
+use windows_sys::Win32::Security::Credentials::CredWriteW;
+use windows_sys::Win32::Security::Credentials::CREDENTIALW;
+use windows_sys::Win32::Security::Credentials::CRED_PERSIST_LOCAL_MACHINE;
+use windows_sys::Win32::Security::Credentials::CRED_TYPE_GENERIC;
+
+struct WindowsCredential;
+
+/// Converts a string to a nul-terminated wide UTF-16 byte sequence.
+fn wstr(s: &str) -> Vec<u16> {
+ let mut wide: Vec<u16> = OsStr::new(s).encode_wide().collect();
+ if wide.iter().any(|b| *b == 0) {
+ panic!("nul byte in wide string");
+ }
+ wide.push(0);
+ wide
+}
+
+fn target_name(registry_name: &str) -> Vec<u16> {
+ wstr(&format!("cargo-registry:{}", registry_name))
+}
+
+impl Credential for WindowsCredential {
+ fn name(&self) -> &'static str {
+ env!("CARGO_PKG_NAME")
+ }
+
+ fn get(&self, index_url: &str) -> Result<String, Error> {
+ let target_name = target_name(index_url);
+ let p_credential: *mut CREDENTIALW = std::ptr::null_mut() as *mut _;
+ unsafe {
+ if CredReadW(
+ target_name.as_ptr(),
+ CRED_TYPE_GENERIC,
+ 0,
+ p_credential as *mut _ as *mut _,
+ ) != TRUE
+ {
+ return Err(
+ format!("failed to fetch token: {}", std::io::Error::last_os_error()).into(),
+ );
+ }
+ let bytes = std::slice::from_raw_parts(
+ (*p_credential).CredentialBlob,
+ (*p_credential).CredentialBlobSize as usize,
+ );
+ String::from_utf8(bytes.to_vec()).map_err(|_| "failed to convert token to UTF8".into())
+ }
+ }
+
+ fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error> {
+ let token = token.as_bytes();
+ let target_name = target_name(index_url);
+ let comment = match name {
+ Some(name) => wstr(&format!("Cargo registry token for {}", name)),
+ None => wstr("Cargo registry token"),
+ };
+ let mut credential = CREDENTIALW {
+ Flags: 0,
+ Type: CRED_TYPE_GENERIC,
+ TargetName: target_name.as_ptr() as PWSTR,
+ Comment: comment.as_ptr() as PWSTR,
+ LastWritten: FILETIME {
+ dwLowDateTime: 0,
+ dwHighDateTime: 0,
+ },
+ CredentialBlobSize: token.len() as u32,
+ CredentialBlob: token.as_ptr() as *mut u8,
+ Persist: CRED_PERSIST_LOCAL_MACHINE,
+ AttributeCount: 0,
+ Attributes: std::ptr::null_mut(),
+ TargetAlias: std::ptr::null_mut(),
+ UserName: std::ptr::null_mut(),
+ };
+ let result = unsafe { CredWriteW(&mut credential, 0) };
+ if result != TRUE {
+ let err = std::io::Error::last_os_error();
+ return Err(format!("failed to store token: {}", err).into());
+ }
+ Ok(())
+ }
+
+ fn erase(&self, index_url: &str) -> Result<(), Error> {
+ let target_name = target_name(index_url);
+ let result = unsafe { CredDeleteW(target_name.as_ptr(), CRED_TYPE_GENERIC, 0) };
+ if result != TRUE {
+ let err = std::io::Error::last_os_error();
+ if err.raw_os_error() == Some(ERROR_NOT_FOUND as i32) {
+ eprintln!("not currently logged in to `{}`", index_url);
+ return Ok(());
+ }
+ return Err(format!("failed to remove token: {}", err).into());
+ }
+ Ok(())
+ }
+}
+
+fn main() {
+ cargo_credential::main(WindowsCredential);
+}
diff --git a/crates/credential/cargo-credential/Cargo.toml b/crates/credential/cargo-credential/Cargo.toml
new file mode 100644
index 0000000..2addaf5
--- /dev/null
+++ b/crates/credential/cargo-credential/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "cargo-credential"
+version = "0.2.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = "A library to assist writing Cargo credential helpers."
+
+[dependencies]
diff --git a/crates/credential/cargo-credential/README.md b/crates/credential/cargo-credential/README.md
new file mode 100644
index 0000000..1f75e59
--- /dev/null
+++ b/crates/credential/cargo-credential/README.md
@@ -0,0 +1,41 @@
+# cargo-credential
+
+This package is a library to assist writing a Cargo credential helper, which
+provides an interface to store tokens for authorizing access to a registry
+such as https://crates.io/.
+
+Documentation about credential processes may be found at
+https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#credential-process
+
+Example implementations may be found at
+https://github.com/rust-lang/cargo/tree/master/crates/credential
+
+## Usage
+
+Create a Cargo project with this as a dependency:
+
+```toml
+# Add this to your Cargo.toml:
+
+[dependencies]
+cargo-credential = "0.1"
+```
+
+And then include a `main.rs` binary which implements the `Credential` trait, and calls
+the `main` function which will call the appropriate method of the trait:
+
+```rust
+// src/main.rs
+
+use cargo_credential::{Credential, Error};
+
+struct MyCredential;
+
+impl Credential for MyCredential {
+ /// implement trait methods here...
+}
+
+fn main() {
+ cargo_credential::main(MyCredential);
+}
+```
diff --git a/crates/credential/cargo-credential/src/lib.rs b/crates/credential/cargo-credential/src/lib.rs
new file mode 100644
index 0000000..3baf42d
--- /dev/null
+++ b/crates/credential/cargo-credential/src/lib.rs
@@ -0,0 +1,86 @@
+//! Helper library for writing Cargo credential processes.
+//!
+//! A credential process should have a `struct` that implements the `Credential` trait.
+//! The `main` function should be called with an instance of that struct, such as:
+//!
+//! ```rust,ignore
+//! fn main() {
+//! cargo_credential::main(MyCredential);
+//! }
+//! ```
+//!
+//! This will determine the action to perform (get/store/erase) by looking at
+//! the CLI arguments for the first argument that does not start with `-`. It
+//! will then call the corresponding method of the trait to perform the
+//! requested action.
+
+pub type Error = Box<dyn std::error::Error>;
+
+pub trait Credential {
+ /// Returns the name of this credential process.
+ fn name(&self) -> &'static str;
+
+ /// Retrieves a token for the given registry.
+ fn get(&self, index_url: &str) -> Result<String, Error>;
+
+ /// Stores the given token for the given registry.
+ fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error>;
+
+ /// Removes the token for the given registry.
+ ///
+ /// If the user is not logged in, this should print a message to stderr if
+ /// possible indicating that the user is not currently logged in, and
+ /// return `Ok`.
+ fn erase(&self, index_url: &str) -> Result<(), Error>;
+}
+
+/// Runs the credential interaction by processing the command-line and
+/// environment variables.
+pub fn main(credential: impl Credential) {
+ let name = credential.name();
+ if let Err(e) = doit(credential) {
+ eprintln!("{} error: {}", name, e);
+ std::process::exit(1);
+ }
+}
+
+fn env(name: &str) -> Result<String, Error> {
+ std::env::var(name).map_err(|_| format!("environment variable `{}` is not set", name).into())
+}
+
+fn doit(credential: impl Credential) -> Result<(), Error> {
+ let which = std::env::args()
+ .skip(1)
+ .skip_while(|arg| arg.starts_with('-'))
+ .next()
+ .ok_or_else(|| "first argument must be the {action}")?;
+ let index_url = env("CARGO_REGISTRY_INDEX_URL")?;
+ let name = std::env::var("CARGO_REGISTRY_NAME_OPT").ok();
+ let result = match which.as_ref() {
+ "get" => credential.get(&index_url).and_then(|token| {
+ println!("{}", token);
+ Ok(())
+ }),
+ "store" => {
+ read_token().and_then(|token| credential.store(&index_url, &token, name.as_deref()))
+ }
+ "erase" => credential.erase(&index_url),
+ _ => {
+ return Err(format!(
+ "unexpected command-line argument `{}`, expected get/store/erase",
+ which
+ )
+ .into())
+ }
+ };
+ result.map_err(|e| format!("failed to `{}` token: {}", which, e).into())
+}
+
+fn read_token() -> Result<String, Error> {
+ let mut buffer = String::new();
+ std::io::stdin().read_line(&mut buffer)?;
+ if buffer.ends_with('\n') {
+ buffer.pop();
+ }
+ Ok(buffer)
+}
diff --git a/crates/home/CHANGELOG.md b/crates/home/CHANGELOG.md
new file mode 100644
index 0000000..7674667
--- /dev/null
+++ b/crates/home/CHANGELOG.md
@@ -0,0 +1,46 @@
+# Changelog
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+<!-- ## [Unreleased] -->
+
+## [0.5.4] - 2022-10-10
+- Add `_with_env` variants of functions to support in-process threaded tests for
+ rustup.
+
+## [0.5.3] - 2020-01-07
+
+Use Rust 1.36.0 as minimum Rust version.
+
+## [0.5.2] - 2020-01-05
+
+*YANKED since it cannot be built on Rust 1.36.0*
+
+### Changed
+- Check for emptiness of `CARGO_HOME` and `RUSTUP_HOME` environment variables.
+- Windows: Use `SHGetFolderPath` to replace `GetUserProfileDirectory` syscall.
+ * Remove `scopeguard` dependency.
+
+## [0.5.1] - 2019-10-12
+### Changed
+- Disable unnecessary features for `scopeguard`. Thanks @mati865.
+
+## [0.5.0] - 2019-08-21
+### Added
+- Add `home_dir` implementation for Windows UWP platforms.
+
+### Fixed
+- Fix `rustup_home` implementation when `RUSTUP_HOME` is an absolute directory.
+- Fix `cargo_home` implementation when `CARGO_HOME` is an absolute directory.
+
+### Removed
+- Remove support for `multirust` folder used in old version of `rustup`.
+
+[Unreleased]: https://github.com/brson/home/compare/v0.5.4...HEAD
+[0.5.4]: https://github.com/brson/home/compare/v0.5.3...v0.5.4
+[0.5.3]: https://github.com/brson/home/compare/v0.5.2...v0.5.3
+[0.5.2]: https://github.com/brson/home/compare/v0.5.1...v0.5.2
+[0.5.1]: https://github.com/brson/home/compare/v0.5.0...v0.5.1
+[0.5.0]: https://github.com/brson/home/compare/0.4.2...v0.5.0
diff --git a/crates/home/Cargo.toml b/crates/home/Cargo.toml
new file mode 100644
index 0000000..18459df
--- /dev/null
+++ b/crates/home/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "home"
+version = "0.5.4" # also update `html_root_url` in `src/lib.rs`
+authors = [ "Brian Anderson <andersrb@gmail.com>" ]
+documentation = "https://docs.rs/home"
+edition = "2018"
+include = [
+ "/src",
+ "/Cargo.toml",
+ "/CHANGELOG",
+ "/LICENSE-*",
+ "/README.md",
+]
+license = "MIT OR Apache-2.0"
+readme = "README.md"
+repository = "https://github.com/brson/home"
+description = "Shared definitions of home directories"
+
+[target.'cfg(windows)'.dependencies]
+windows-sys = { version = "0.45.0", features = ["Win32_Foundation", "Win32_UI_Shell"] }
diff --git a/crates/home/LICENSE-APACHE b/crates/home/LICENSE-APACHE
new file mode 120000
index 0000000..1cd601d
--- /dev/null
+++ b/crates/home/LICENSE-APACHE
@@ -0,0 +1 @@
+../../LICENSE-APACHE \ No newline at end of file
diff --git a/crates/home/LICENSE-MIT b/crates/home/LICENSE-MIT
new file mode 120000
index 0000000..b2cfbdc
--- /dev/null
+++ b/crates/home/LICENSE-MIT
@@ -0,0 +1 @@
+../../LICENSE-MIT \ No newline at end of file
diff --git a/crates/home/README.md b/crates/home/README.md
new file mode 100644
index 0000000..db2ba92
--- /dev/null
+++ b/crates/home/README.md
@@ -0,0 +1,27 @@
+[![Documentation](https://docs.rs/home/badge.svg)](https://docs.rs/home)
+[![Crates.io](https://img.shields.io/crates/v/home.svg)](https://crates.io/crates/home)
+
+Canonical definitions of `home_dir`, `cargo_home`, and `rustup_home`.
+
+This provides the definition of `home_dir` used by Cargo and rustup,
+as well as functions to find the correct value of `CARGO_HOME` and
+`RUSTUP_HOME`.
+
+The definition of `home_dir` provided by the standard library is
+incorrect because it considers the `HOME` environment variable on
+Windows. This causes surprising situations where a Rust program will
+behave differently depending on whether it is run under a Unix
+emulation environment like Cygwin or MinGW. Neither Cargo nor rustup
+use the standard library's definition - they use the definition here.
+
+This crate further provides two functions, `cargo_home` and
+`rustup_home`, which are the canonical way to determine the location
+that Cargo and rustup store their data.
+
+See [rust-lang/rust#43321].
+
+[rust-lang/rust#43321]: https://github.com/rust-lang/rust/issues/43321
+
+## License
+
+MIT OR Apache-2.0
diff --git a/crates/home/src/env.rs b/crates/home/src/env.rs
new file mode 100644
index 0000000..e47273b
--- /dev/null
+++ b/crates/home/src/env.rs
@@ -0,0 +1,106 @@
+//! Lower-level utilities for mocking the process environment.
+
+use std::{
+ ffi::OsString,
+ io,
+ path::{Path, PathBuf},
+};
+
+/// Permits parameterizing the home functions via the _from variants - used for
+/// in-process unit testing by rustup.
+pub trait Env {
+    /// Return the path to the user's home dir, or None if any error occurs:
+ /// see home_inner.
+ fn home_dir(&self) -> Option<PathBuf>;
+ /// Return the current working directory.
+ fn current_dir(&self) -> io::Result<PathBuf>;
+ /// Get an environment variable, as per std::env::var_os.
+ fn var_os(&self, key: &str) -> Option<OsString>;
+}
+
+/// Implements Env for the OS context, both Unix style and Windows.
+///
+/// This trait permits in-process testing by providing a control point to
+/// allow in-process divergence on what is normally process wide state.
+///
+/// Implementations should be provided by whatever testing framework the caller
+/// is using. Code that is not performing in-process threaded testing requiring
+/// isolated rustup/cargo directories does not need this trait or the _from
+/// functions.
+pub struct OsEnv;
+impl Env for OsEnv {
+ fn home_dir(&self) -> Option<PathBuf> {
+ crate::home_dir_inner()
+ }
+ fn current_dir(&self) -> io::Result<PathBuf> {
+ std::env::current_dir()
+ }
+ fn var_os(&self, key: &str) -> Option<OsString> {
+ std::env::var_os(key)
+ }
+}
+
+pub const OS_ENV: OsEnv = OsEnv {};
+
+/// Returns the path of the current user's home directory from [`Env::home_dir`].
+pub fn home_dir_with_env(env: &dyn Env) -> Option<PathBuf> {
+ env.home_dir()
+}
+
+/// Variant of cargo_home where the environment source is parameterized. This is
+/// specifically to support in-process testing scenarios as environment
+/// variables and user home metadata are normally process global state. See the
+/// [`Env`] trait.
+pub fn cargo_home_with_env(env: &dyn Env) -> io::Result<PathBuf> {
+ let cwd = env.current_dir()?;
+ cargo_home_with_cwd_env(env, &cwd)
+}
+
+/// Variant of cargo_home_with_cwd where the environment source is
+/// parameterized. This is specifically to support in-process testing scenarios
+/// as environment variables and user home metadata are normally process global
+/// state. See the OsEnv trait.
+pub fn cargo_home_with_cwd_env(env: &dyn Env, cwd: &Path) -> io::Result<PathBuf> {
+ match env.var_os("CARGO_HOME").filter(|h| !h.is_empty()) {
+ Some(home) => {
+ let home = PathBuf::from(home);
+ if home.is_absolute() {
+ Ok(home)
+ } else {
+ Ok(cwd.join(&home))
+ }
+ }
+ _ => home_dir_with_env(env)
+ .map(|p| p.join(".cargo"))
+ .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "could not find cargo home dir")),
+ }
+}
+
+/// Variant of rustup_home where the environment source is
+/// parameterized. This is specifically to support in-process testing scenarios
+/// as environment variables and user home metadata are normally process global
+/// state. See the OsEnv trait.
+pub fn rustup_home_with_env(env: &dyn Env) -> io::Result<PathBuf> {
+ let cwd = env.current_dir()?;
+ rustup_home_with_cwd_env(env, &cwd)
+}
+
+/// Variant of rustup_home_with_cwd where the environment source is
+/// parameterized. This is specifically to support in-process testing scenarios
+/// as environment variables and user home metadata are normally process global
+/// state. See the OsEnv trait.
+pub fn rustup_home_with_cwd_env(env: &dyn Env, cwd: &Path) -> io::Result<PathBuf> {
+ match env.var_os("RUSTUP_HOME").filter(|h| !h.is_empty()) {
+ Some(home) => {
+ let home = PathBuf::from(home);
+ if home.is_absolute() {
+ Ok(home)
+ } else {
+ Ok(cwd.join(&home))
+ }
+ }
+ _ => home_dir_with_env(env)
+ .map(|d| d.join(".rustup"))
+ .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "could not find rustup home dir")),
+ }
+}
diff --git a/crates/home/src/lib.rs b/crates/home/src/lib.rs
new file mode 100644
index 0000000..306026e
--- /dev/null
+++ b/crates/home/src/lib.rs
@@ -0,0 +1,149 @@
+//! Canonical definitions of `home_dir`, `cargo_home`, and `rustup_home`.
+//!
+//! This provides the definition of `home_dir` used by Cargo and
+//! rustup, as well as functions to find the correct value of
+//! `CARGO_HOME` and `RUSTUP_HOME`.
+//!
+//! See also the [`dirs`](https://docs.rs/dirs) crate.
+//!
+//! _Note that as of 2019/08/06 it appears that cargo uses this crate. And
+//! rustup has used this crate since 2019/08/21._
+//!
+//! The definition of `home_dir` provided by the standard library is
+//! incorrect because it considers the `HOME` environment variable on
+//! Windows. This causes surprising situations where a Rust program
+//! will behave differently depending on whether it is run under a
+//! Unix emulation environment like Cygwin or MinGW. Neither Cargo nor
+//! rustup use the standard library's definition - they use the
+//! definition here.
+//!
+//! This crate further provides two functions, `cargo_home` and
+//! `rustup_home`, which are the canonical way to determine the
+//! location that Cargo and rustup store their data.
+//!
+//! See also this [discussion].
+//!
+//! [discussion]: https://github.com/rust-lang/rust/pull/46799#issuecomment-361156935
+
+#![doc(html_root_url = "https://docs.rs/home/0.5.4")]
+#![deny(rust_2018_idioms)]
+
+pub mod env;
+
+#[cfg(target_os = "windows")]
+mod windows;
+
+use std::io;
+use std::path::{Path, PathBuf};
+
+/// Returns the path of the current user's home directory if known.
+///
+/// # Unix
+///
+/// Returns the value of the `HOME` environment variable if it is set
+/// and not equal to the empty string. Otherwise, it tries to determine the
+/// home directory by invoking the `getpwuid_r` function on the UID of the
+/// current user.
+///
+/// # Windows
+///
+/// Returns the value of the `USERPROFILE` environment variable if it
+/// is set and not equal to the empty string. If both do not exist,
+/// [`SHGetFolderPathW`][msdn] is used to return the appropriate path.
+///
+/// [msdn]: https://docs.microsoft.com/en-us/windows/win32/api/shlobj_core/nf-shlobj_core-shgetfolderpathw
+///
+/// # Examples
+///
+/// ```
+/// match home::home_dir() {
+/// Some(path) => println!("{}", path.display()),
+/// None => println!("Impossible to get your home dir!"),
+/// }
+/// ```
+pub fn home_dir() -> Option<PathBuf> {
+ env::home_dir_with_env(&env::OS_ENV)
+}
+
+#[cfg(windows)]
+use windows::home_dir_inner;
+
+#[cfg(any(unix, target_os = "redox"))]
+fn home_dir_inner() -> Option<PathBuf> {
+ #[allow(deprecated)]
+ std::env::home_dir()
+}
+
+/// Returns the storage directory used by Cargo, often known as
+/// `.cargo` or `CARGO_HOME`.
+///
+/// It returns one of the following values, in this order of
+/// preference:
+///
+/// - The value of the `CARGO_HOME` environment variable, if it is
+/// an absolute path.
+/// - The value of the current working directory joined with the value
+/// of the `CARGO_HOME` environment variable, if `CARGO_HOME` is a
+/// relative directory.
+/// - The `.cargo` directory in the user's home directory, as reported
+/// by the `home_dir` function.
+///
+/// # Errors
+///
+/// This function fails if it fails to retrieve the current directory,
+/// or if the home directory cannot be determined.
+///
+/// # Examples
+///
+/// ```
+/// match home::cargo_home() {
+/// Ok(path) => println!("{}", path.display()),
+/// Err(err) => eprintln!("Cannot get your cargo home dir: {:?}", err),
+/// }
+/// ```
+pub fn cargo_home() -> io::Result<PathBuf> {
+ env::cargo_home_with_env(&env::OS_ENV)
+}
+
+/// Returns the storage directory used by Cargo within `cwd`.
+/// For more details, see [`cargo_home`](fn.cargo_home.html).
+pub fn cargo_home_with_cwd(cwd: &Path) -> io::Result<PathBuf> {
+ env::cargo_home_with_cwd_env(&env::OS_ENV, cwd)
+}
+
+/// Returns the storage directory used by rustup, often known as
+/// `.rustup` or `RUSTUP_HOME`.
+///
+/// It returns one of the following values, in this order of
+/// preference:
+///
+/// - The value of the `RUSTUP_HOME` environment variable, if it is
+/// an absolute path.
+/// - The value of the current working directory joined with the value
+/// of the `RUSTUP_HOME` environment variable, if `RUSTUP_HOME` is a
+/// relative directory.
+/// - The `.rustup` directory in the user's home directory, as reported
+/// by the `home_dir` function.
+///
+/// # Errors
+///
+/// This function fails if it fails to retrieve the current directory,
+/// or if the home directory cannot be determined.
+///
+/// # Examples
+///
+/// ```
+/// match home::rustup_home() {
+/// Ok(path) => println!("{}", path.display()),
+/// Err(err) => eprintln!("Cannot get your rustup home dir: {:?}", err),
+/// }
+/// ```
+pub fn rustup_home() -> io::Result<PathBuf> {
+ env::rustup_home_with_env(&env::OS_ENV)
+}
+
+/// Returns the storage directory used by rustup within `cwd`.
+/// For more details, see [`rustup_home`](fn.rustup_home.html).
+pub fn rustup_home_with_cwd(cwd: &Path) -> io::Result<PathBuf> {
+ env::rustup_home_with_cwd_env(&env::OS_ENV, cwd)
+}
diff --git a/crates/home/src/windows.rs b/crates/home/src/windows.rs
new file mode 100644
index 0000000..a35dc9c
--- /dev/null
+++ b/crates/home/src/windows.rs
@@ -0,0 +1,66 @@
+use std::env;
+use std::ffi::OsString;
+use std::os::windows::ffi::OsStringExt;
+use std::path::PathBuf;
+
+use windows_sys::Win32::Foundation::{MAX_PATH, S_OK};
+use windows_sys::Win32::UI::Shell::{SHGetFolderPathW, CSIDL_PROFILE};
+
+pub fn home_dir_inner() -> Option<PathBuf> {
+ env::var_os("USERPROFILE")
+ .filter(|s| !s.is_empty())
+ .map(PathBuf::from)
+ .or_else(home_dir_crt)
+}
+
+#[cfg(not(target_vendor = "uwp"))]
+fn home_dir_crt() -> Option<PathBuf> {
+ unsafe {
+ let mut path: Vec<u16> = Vec::with_capacity(MAX_PATH as usize);
+ match SHGetFolderPathW(0, CSIDL_PROFILE as i32, 0, 0, path.as_mut_ptr()) {
+ S_OK => {
+ let len = wcslen(path.as_ptr());
+ path.set_len(len);
+ let s = OsString::from_wide(&path);
+ Some(PathBuf::from(s))
+ }
+ _ => None,
+ }
+ }
+}
+
+#[cfg(target_vendor = "uwp")]
+fn home_dir_crt() -> Option<PathBuf> {
+ None
+}
+
+extern "C" {
+ fn wcslen(buf: *const u16) -> usize;
+}
+
+#[cfg(not(target_vendor = "uwp"))]
+#[cfg(test)]
+mod tests {
+ use super::home_dir_inner;
+ use std::env;
+ use std::ops::Deref;
+ use std::path::{Path, PathBuf};
+
+ #[test]
+ fn test_with_without() {
+ let olduserprofile = env::var_os("USERPROFILE").unwrap();
+
+ env::remove_var("HOME");
+ env::remove_var("USERPROFILE");
+
+ assert_eq!(home_dir_inner(), Some(PathBuf::from(olduserprofile)));
+
+ let home = Path::new(r"C:\Users\foo tar baz");
+
+ env::set_var("HOME", home.as_os_str());
+ assert_ne!(home_dir_inner().as_ref().map(Deref::deref), Some(home));
+
+ env::set_var("USERPROFILE", home.as_os_str());
+ assert_eq!(home_dir_inner().as_ref().map(Deref::deref), Some(home));
+ }
+}
diff --git a/crates/mdman/Cargo.lock b/crates/mdman/Cargo.lock
new file mode 100644
index 0000000..51fe47a
--- /dev/null
+++ b/crates/mdman/Cargo.lock
@@ -0,0 +1,459 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "anyhow"
+version = "1.0.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6b602bfe940d21c130f3895acd65221e8a61270debe89d628b9cb4e3ccb8569b"
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "block-buffer"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b"
+dependencies = [
+ "block-padding",
+ "byte-tools",
+ "byteorder",
+ "generic-array",
+]
+
+[[package]]
+name = "block-padding"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5"
+dependencies = [
+ "byte-tools",
+]
+
+[[package]]
+name = "byte-tools"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7"
+
+[[package]]
+name = "byteorder"
+version = "1.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
+
+[[package]]
+name = "cfg-if"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
+
+[[package]]
+name = "ctor"
+version = "0.1.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39858aa5bac06462d4dd4b9164848eb81ffc4aa5c479746393598fd193afa227"
+dependencies = [
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "diff"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
+
+[[package]]
+name = "digest"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "fake-simd"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191"
+dependencies = [
+ "matches",
+ "percent-encoding",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c68f0274ae0e023facc3c97b2e00f076be70e254bc851d972503b328db79b2ec"
+dependencies = [
+ "typenum",
+]
+
+[[package]]
+name = "handlebars"
+version = "3.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86dbc8a0746b08f363d2e00da48e6c9ceb75c198ac692d2715fcbb5bee74c87d"
+dependencies = [
+ "log",
+ "pest",
+ "pest_derive",
+ "quick-error",
+ "serde",
+ "serde_json",
+ "walkdir",
+]
+
+[[package]]
+name = "idna"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9"
+dependencies = [
+ "matches",
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "itoa"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6"
+
+[[package]]
+name = "log"
+version = "0.4.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "maplit"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
+
+[[package]]
+name = "matches"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
+
+[[package]]
+name = "mdman"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "handlebars",
+ "pretty_assertions",
+ "pulldown-cmark",
+ "same-file",
+ "serde_json",
+ "url",
+]
+
+[[package]]
+name = "memchr"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+
+[[package]]
+name = "opaque-debug"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c"
+
+[[package]]
+name = "output_vt100"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "percent-encoding"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
+
+[[package]]
+name = "pest"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53"
+dependencies = [
+ "ucd-trie",
+]
+
+[[package]]
+name = "pest_derive"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0"
+dependencies = [
+ "pest",
+ "pest_generator",
+]
+
+[[package]]
+name = "pest_generator"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55"
+dependencies = [
+ "pest",
+ "pest_meta",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "pest_meta"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "54be6e404f5317079812fc8f9f5279de376d8856929e21c184ecf6bbd692a11d"
+dependencies = [
+ "maplit",
+ "pest",
+ "sha-1",
+]
+
+[[package]]
+name = "pretty_assertions"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755"
+dependencies = [
+ "ctor",
+ "diff",
+ "output_vt100",
+ "yansi",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04f5f085b5d71e2188cb8271e5da0161ad52c3f227a661a3c135fdf28e258b12"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "pulldown-cmark"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d9cc634bc78768157b5cbfe988ffcd1dcba95cd2b2f03a88316c08c6d00ed63"
+dependencies = [
+ "bitflags",
+ "memchr",
+ "unicase",
+]
+
+[[package]]
+name = "quick-error"
+version = "1.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
+
+[[package]]
+name = "quote"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.114"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5317f7588f0a5078ee60ef675ef96735a1442132dc645eb1d12c018620ed8cd3"
+
+[[package]]
+name = "serde_json"
+version = "1.0.57"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "164eacbdb13512ec2745fb09d51fd5b22b0d65ed294a1dcf7285a360c80a675c"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "sha-1"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df"
+dependencies = [
+ "block-buffer",
+ "digest",
+ "fake-simd",
+ "opaque-debug",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4cdb98bcb1f9d81d07b536179c269ea15999b5d14ea958196413869445bb5250"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "tinyvec"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "53953d2d3a5ad81d9f844a32f14ebb121f50b650cd59d0ee2a07cf13c617efed"
+
+[[package]]
+name = "typenum"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33"
+
+[[package]]
+name = "ucd-trie"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c"
+
+[[package]]
+name = "unicase"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
+dependencies = [
+ "version_check",
+]
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
+dependencies = [
+ "matches",
+]
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6fb19cf769fa8c6a80a162df694621ebeb4dafb606470b2b2fce0be40a98a977"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
+
+[[package]]
+name = "url"
+version = "2.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "matches",
+ "percent-encoding",
+]
+
+[[package]]
+name = "version_check"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"
+
+[[package]]
+name = "walkdir"
+version = "2.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d"
+dependencies = [
+ "same-file",
+ "winapi",
+ "winapi-util",
+]
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "yansi"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
diff --git a/crates/mdman/Cargo.toml b/crates/mdman/Cargo.toml
new file mode 100644
index 0000000..92cdf2e
--- /dev/null
+++ b/crates/mdman/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "mdman"
+version = "0.1.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+description = "Creates a man page from markdown."
+
+[dependencies]
+anyhow = "1.0.31"
+handlebars = { version = "3.2.1", features = ["dir_source"] }
+pulldown-cmark = { version = "0.9.2", default-features = false }
+same-file = "1.0.6"
+serde_json = "1.0.56"
+url = "2.2.2"
+
+[dev-dependencies]
+pretty_assertions = "1.3.0"
diff --git a/crates/mdman/README.md b/crates/mdman/README.md
new file mode 100644
index 0000000..e28b596
--- /dev/null
+++ b/crates/mdman/README.md
@@ -0,0 +1,7 @@
+# mdman
+
+mdman is a small utility for creating man pages from markdown text files.
+
+## Usage
+
+See the [man page](doc/out/mdman.md) generated by this tool.
diff --git a/crates/mdman/build-man.sh b/crates/mdman/build-man.sh
new file mode 100755
index 0000000..9286b17
--- /dev/null
+++ b/crates/mdman/build-man.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+set -e
+
+cargo run -- -t md -o doc/out doc/*.md
+cargo run -- -t txt -o doc/out doc/*.md
+cargo run -- -t man -o doc/out doc/*.md
diff --git a/crates/mdman/doc/mdman.md b/crates/mdman/doc/mdman.md
new file mode 100644
index 0000000..2025c13
--- /dev/null
+++ b/crates/mdman/doc/mdman.md
@@ -0,0 +1,95 @@
+# mdman(1)
+
+## NAME
+
+mdman - Converts markdown to a man page
+
+## SYNOPSIS
+
+`mdman` [_options_] `-t` _type_ `-o` _outdir_ _sources..._
+
+## DESCRIPTION
+
+Converts a markdown file to a man page.
+
+The source file is first processed as a
+[handlebars](https://handlebarsjs.com/) template. Then, it is processed as
+markdown into the target format. This supports different output formats,
+such as troff or plain text.
+
+Every man page should start with a level-1 header with the man name and
+section, such as `# mdman(1)`.
+
+The handlebars template has several special tags to assist with generating the
+man page:
+
+{{{{raw}}}}
+- Every block of command-line options must be wrapped between `{{#options}}`
+ and `{{/options}}` tags. This tells the processor where the options start
+ and end.
+- Each option must be expressed with a `{{#option}}` block. The parameters to
+  the block are a sequence of strings indicating the option. For example,
+ ```{{#option "`-p` _spec_..." "`--package` _spec_..."}}``` is an option that
+ has two different forms. The text within the string is processed as markdown.
+ It is recommended to use formatting similar to this example.
+
+ The content of the `{{#option}}` block should contain a detailed description
+ of the option.
+
+ Use the `{{/option}}` tag to end the option block.
+- References to other man pages should use the `{{man name section}}`
+ expression. For example, `{{man "mdman" 1}}` will generate a reference to
+ the `mdman(1)` man page. For non-troff output, the `--man` option will tell
+ `mdman` how to create links to the man page. If there is no matching `--man`
+ option, then it links to a file named _name_`.md` in the same directory.
+- Variables can be set with `{{*set name="value"}}`. These variables can
+ then be referenced with `{{name}}` expressions.
+- Partial templates should be placed in a directory named `includes`
+ next to the source file. Templates can be included with an expression like
+ `{{> template-name}}`.
+- Other helpers include:
+ - `{{lower value}}` Converts the given value to lowercase.
+{{{{/raw}}}}
+
+## OPTIONS
+
+{{#options}}
+
+{{#option "`-t` _type_"}}
+Specifies the output type. The following output types are supported:
+- `man` — A troff-style man page. Outputs with a numbered extension (like
+ `.1`) matching the man page section.
+- `md` — A markdown file, after all handlebars processing has been finished.
+ Outputs with the `.md` extension.
+- `txt` — A text file, rendered for situations where a man page viewer isn't
+ available. Outputs with the `.txt` extension.
+{{/option}}
+
+{{#option "`-o` _outdir_"}}
+Specifies the directory where to save the output.
+{{/option}}
+
+{{#option "`--url` _base_url_"}}
+Specifies a base URL to use for relative URLs within the document. Any
+relative URL will be joined with this URL.
+{{/option}}
+
+{{#option "`--man` _name_`:`_section_`=`_url_"}}
+Specifies a URL to use for the given man page. When the `\{{man name
+section}}` expression is used, the given URL will be inserted as a link. This
+may be specified multiple times. If a man page reference does not have a
+matching `--man` entry, then a relative link to a file named _name_`.md` will
+be used.
+{{/option}}
+
+{{#option "_sources..._"}}
+The source input filename. May be specified multiple times.
+{{/option}}
+
+{{/options}}
+
+## EXAMPLES
+
+1. Convert the given documents to man pages:
+
+ mdman -t man -o doc doc/mdman.md
diff --git a/crates/mdman/doc/out/mdman.1 b/crates/mdman/doc/out/mdman.1
new file mode 100644
index 0000000..0718d6d
--- /dev/null
+++ b/crates/mdman/doc/out/mdman.1
@@ -0,0 +1,124 @@
+'\" t
+.TH "MDMAN" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+mdman \- Converts markdown to a man page
+.SH "SYNOPSIS"
+\fBmdman\fR [\fIoptions\fR] \fB\-t\fR \fItype\fR \fB\-o\fR \fIoutdir\fR \fIsources...\fR
+.SH "DESCRIPTION"
+Converts a markdown file to a man page.
+.sp
+The source file is first processed as a
+\fIhandlebars\fR <https://handlebarsjs.com/> template. Then, it is processed as
+markdown into the target format. This supports different output formats,
+such as troff or plain text.
+.sp
+Every man page should start with a level\-1 header with the man name and
+section, such as \fB# mdman(1)\fR\&.
+.sp
+The handlebars template has several special tags to assist with generating the
+man page:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Every block of command\-line options must be wrapped between \fB{{#options}}\fR
+and \fB{{/options}}\fR tags. This tells the processor where the options start
+and end.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Each option must be expressed with a \fB{{#option}}\fR block. The parameters to
+the block are a sequence of strings indicating the option. For example,
+\fB{{#option "`\-p` _spec_..." "`\-\-package` _spec_..."}}\fR is an option that
+has two different forms. The text within the string is processed as markdown.
+It is recommended to use formatting similar to this example.
+.sp
+The content of the \fB{{#option}}\fR block should contain a detailed description
+of the option.
+.sp
+Use the \fB{{/option}}\fR tag to end the option block.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'References to other man pages should use the \fB{{man name section}}\fR
+expression. For example, \fB{{man "mdman" 1}}\fR will generate a reference to
+the \fBmdman(1)\fR man page. For non\-troff output, the \fB\-\-man\fR option will tell
+\fBmdman\fR how to create links to the man page. If there is no matching \fB\-\-man\fR
+option, then it links to a file named \fIname\fR\fB\&.md\fR in the same directory.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Variables can be set with \fB{{*set name="value"}}\fR\&. These variables can
+then be referenced with \fB{{name}}\fR expressions.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Partial templates should be placed in a directory named \fBincludes\fR
+next to the source file. Templates can be included with an expression like
+\fB{{> template\-name}}\fR\&.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Other helpers include:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fB{{lower value}}\fR Converts the given value to lowercase.
+.RE
+.RE
+.SH "OPTIONS"
+.sp
+\fB\-t\fR \fItype\fR
+.RS 4
+Specifies the output type. The following output types are supported:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBman\fR \[em] A troff\-style man page. Outputs with a numbered extension (like
+\fB\&.1\fR) matching the man page section.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBmd\fR \[em] A markdown file, after all handlebars processing has been finished.
+Outputs with the \fB\&.md\fR extension.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBtxt\fR \[em] A text file, rendered for situations where a man page viewer isn't
+available. Outputs with the \fB\&.txt\fR extension.
+.RE
+.RE
+.sp
+\fB\-o\fR \fIoutdir\fR
+.RS 4
+Specifies the directory where to save the output.
+.RE
+.sp
+\fB\-\-url\fR \fIbase_url\fR
+.RS 4
+Specifies a base URL to use for relative URLs within the document. Any
+relative URL will be joined with this URL.
+.RE
+.sp
+\fB\-\-man\fR \fIname\fR\fB:\fR\fIsection\fR\fB=\fR\fIurl\fR
+.RS 4
+Specifies a URL to use for the given man page. When the \fB{{man name section}}\fR expression is used, the given URL will be inserted as a link. This
+may be specified multiple times. If a man page reference does not have a
+matching \fB\-\-man\fR entry, then a relative link to a file named \fIname\fR\fB\&.md\fR will
+be used.
+.RE
+.sp
+\fIsources...\fR
+.RS 4
+The source input filename. May be specified multiple times.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Convert the given documents to man pages:
+.sp
+.RS 4
+.nf
+mdman \-t man \-o doc doc/mdman.md
+.fi
+.RE
+.RE
diff --git a/crates/mdman/doc/out/mdman.md b/crates/mdman/doc/out/mdman.md
new file mode 100644
index 0000000..d0dd345
--- /dev/null
+++ b/crates/mdman/doc/out/mdman.md
@@ -0,0 +1,95 @@
+# mdman(1)
+
+## NAME
+
+mdman - Converts markdown to a man page
+
+## SYNOPSIS
+
+`mdman` [_options_] `-t` _type_ `-o` _outdir_ _sources..._
+
+## DESCRIPTION
+
+Converts a markdown file to a man page.
+
+The source file is first processed as a
+[handlebars](https://handlebarsjs.com/) template. Then, it is processed as
+markdown into the target format. This supports different output formats,
+such as troff or plain text.
+
+Every man page should start with a level-1 header with the man name and
+section, such as `# mdman(1)`.
+
+The handlebars template has several special tags to assist with generating the
+man page:
+
+- Every block of command-line options must be wrapped between `{{#options}}`
+ and `{{/options}}` tags. This tells the processor where the options start
+ and end.
+- Each option must be expressed with a `{{#option}}` block. The parameters to
+  the block are a sequence of strings indicating the option. For example,
+ ```{{#option "`-p` _spec_..." "`--package` _spec_..."}}``` is an option that
+ has two different forms. The text within the string is processed as markdown.
+ It is recommended to use formatting similar to this example.
+
+ The content of the `{{#option}}` block should contain a detailed description
+ of the option.
+
+ Use the `{{/option}}` tag to end the option block.
+- References to other man pages should use the `{{man name section}}`
+ expression. For example, `{{man "mdman" 1}}` will generate a reference to
+ the `mdman(1)` man page. For non-troff output, the `--man` option will tell
+ `mdman` how to create links to the man page. If there is no matching `--man`
+ option, then it links to a file named _name_`.md` in the same directory.
+- Variables can be set with `{{*set name="value"}}`. These variables can
+ then be referenced with `{{name}}` expressions.
+- Partial templates should be placed in a directory named `includes`
+ next to the source file. Templates can be included with an expression like
+ `{{> template-name}}`.
+- Other helpers include:
+ - `{{lower value}}` Converts the given value to lowercase.
+
+
+## OPTIONS
+
+<dl>
+
+<dt class="option-term" id="option-mdman--t"><a class="option-anchor" href="#option-mdman--t"></a><code>-t</code> <em>type</em></dt>
+<dd class="option-desc">Specifies the output type. The following output types are supported:</p>
+<ul>
+<li><code>man</code> — A troff-style man page. Outputs with a numbered extension (like
+<code>.1</code>) matching the man page section.</li>
+<li><code>md</code> — A markdown file, after all handlebars processing has been finished.
+Outputs with the <code>.md</code> extension.</li>
+<li><code>txt</code> — A text file, rendered for situations where a man page viewer isn't
+available. Outputs with the <code>.txt</code> extension.</li>
+</ul></dd>
+
+
+<dt class="option-term" id="option-mdman--o"><a class="option-anchor" href="#option-mdman--o"></a><code>-o</code> <em>outdir</em></dt>
+<dd class="option-desc">Specifies the directory where to save the output.</dd>
+
+
+<dt class="option-term" id="option-mdman---url"><a class="option-anchor" href="#option-mdman---url"></a><code>--url</code> <em>base_url</em></dt>
+<dd class="option-desc">Specifies a base URL to use for relative URLs within the document. Any
+relative URL will be joined with this URL.</dd>
+
+
+<dt class="option-term" id="option-mdman---man"><a class="option-anchor" href="#option-mdman---man"></a><code>--man</code> <em>name</em><code>:</code><em>section</em><code>=</code><em>url</em></dt>
+<dd class="option-desc">Specifies a URL to use for the given man page. When the <code>{{man name section}}</code> expression is used, the given URL will be inserted as a link. This
+may be specified multiple times. If a man page reference does not have a
+matching <code>--man</code> entry, then a relative link to a file named <em>name</em><code>.md</code> will
+be used.</dd>
+
+
+<dt class="option-term" id="option-mdman-sources..."><a class="option-anchor" href="#option-mdman-sources..."></a><em>sources...</em></dt>
+<dd class="option-desc">The source input filename. May be specified multiple times.</dd>
+
+
+</dl>
+
+## EXAMPLES
+
+1. Convert the given documents to man pages:
+
+ mdman -t man -o doc doc/mdman.md
diff --git a/crates/mdman/doc/out/mdman.txt b/crates/mdman/doc/out/mdman.txt
new file mode 100644
index 0000000..83fa7de
--- /dev/null
+++ b/crates/mdman/doc/out/mdman.txt
@@ -0,0 +1,91 @@
+MDMAN(1)
+
+NAME
+ mdman - Converts markdown to a man page
+
+SYNOPSIS
+ mdman [options] -t type -o outdir sources...
+
+DESCRIPTION
+ Converts a markdown file to a man page.
+
+ The source file is first processed as a handlebars
+ <https://handlebarsjs.com/> template. Then, it is processed as markdown
+ into the target format. This supports different output formats, such as
+ troff or plain text.
+
+ Every man page should start with a level-1 header with the man name and
+ section, such as # mdman(1).
+
+ The handlebars template has several special tags to assist with
+ generating the man page:
+
+ o Every block of command-line options must be wrapped between
+ {{#options}} and {{/options}} tags. This tells the processor where
+ the options start and end.
+
+ o Each option must be expressed with a {{#option}} block. The
+     parameters to the block are a sequence of strings indicating the
+ option. For example, {{#option "`-p` _spec_..." "`--package`
+ _spec_..."}} is an option that has two different forms. The text
+ within the string is processed as markdown. It is recommended to use
+ formatting similar to this example.
+
+ The content of the {{#option}} block should contain a detailed
+ description of the option.
+
+ Use the {{/option}} tag to end the option block.
+
+ o References to other man pages should use the {{man name section}}
+ expression. For example, {{man "mdman" 1}} will generate a reference
+ to the mdman(1) man page. For non-troff output, the --man option will
+ tell mdman how to create links to the man page. If there is no
+ matching --man option, then it links to a file named name.md in the
+ same directory.
+
+ o Variables can be set with {{*set name="value"}}. These variables can
+ then be referenced with {{name}} expressions.
+
+ o Partial templates should be placed in a directory named includes next
+ to the source file. Templates can be included with an expression like
+ {{> template-name}}.
+
+ o Other helpers include:
+
+ o {{lower value}} Converts the given value to lowercase.
+
+OPTIONS
+ -t type
+ Specifies the output type. The following output types are supported:
+
+ o man — A troff-style man page. Outputs with a numbered extension
+ (like .1) matching the man page section.
+
+ o md — A markdown file, after all handlebars processing has been
+ finished. Outputs with the .md extension.
+
+ o txt — A text file, rendered for situations where a man page
+ viewer isn't available. Outputs with the .txt extension.
+
+ -o outdir
+ Specifies the directory where to save the output.
+
+ --url base_url
+ Specifies a base URL to use for relative URLs within the document.
+ Any relative URL will be joined with this URL.
+
+ --man name:section=url
+ Specifies a URL to use for the given man page. When the {{man name
+ section}} expression is used, the given URL will be inserted as a
+ link. This may be specified multiple times. If a man page reference
+ does not have a matching --man entry, then a relative link to a file
+ named name.md will be used.
+
+ sources...
+ The source input filename, may be specified multiple times.
+
+EXAMPLES
+ 1. Convert the given documents to man pages:
+
+ mdman -t man -o doc doc/mdman.md
+
diff --git a/crates/mdman/src/format.rs b/crates/mdman/src/format.rs
new file mode 100644
index 0000000..7bc9781
--- /dev/null
+++ b/crates/mdman/src/format.rs
@@ -0,0 +1,20 @@
+use anyhow::Error;
+
+pub mod man;
+pub mod md;
+pub mod text;
+
+pub trait Formatter {
+ /// Renders the given markdown to the formatter's output.
+ fn render(&self, input: &str) -> Result<String, Error>;
+ /// Renders the start of a block of options (triggered by `{{#options}}`).
+ fn render_options_start(&self) -> &'static str;
+ /// Renders the end of a block of options (triggered by `{{/options}}`).
+ fn render_options_end(&self) -> &'static str;
+ /// Renders an option (triggered by `{{#option}}`).
+ fn render_option(&self, params: &[&str], block: &str, man_name: &str) -> Result<String, Error>;
+ /// Converts a man page reference into markdown that is appropriate for this format.
+ ///
+ /// Triggered by `{{man name section}}`.
+ fn linkify_man_to_md(&self, name: &str, section: u8) -> Result<String, Error>;
+}
diff --git a/crates/mdman/src/format/man.rs b/crates/mdman/src/format/man.rs
new file mode 100644
index 0000000..9767fdd
--- /dev/null
+++ b/crates/mdman/src/format/man.rs
@@ -0,0 +1,436 @@
+//! Man-page formatter.
+
+use crate::util::{header_text, parse_name_and_section};
+use crate::EventIter;
+use anyhow::{bail, Error};
+use pulldown_cmark::{Alignment, Event, HeadingLevel, LinkType, Tag};
+use std::fmt::Write;
+use url::Url;
+
/// Formatter that emits troff man-page source.
pub struct ManFormatter {
    /// Optional base URL, forwarded to the markdown parser for resolving
    /// relative URLs.
    url: Option<Url>,
}
+
impl ManFormatter {
    /// Creates a new `ManFormatter` with an optional base URL (forwarded to
    /// the markdown parser).
    pub fn new(url: Option<Url>) -> ManFormatter {
        ManFormatter { url }
    }
}
+
impl super::Formatter for ManFormatter {
    fn render(&self, input: &str) -> Result<String, Error> {
        ManRenderer::render(input, self.url.clone())
    }

    fn render_options_start(&self) -> &'static str {
        // Tell pulldown_cmark to ignore this.
        // This will be stripped out later.
        "<![CDATA["
    }

    fn render_options_end(&self) -> &'static str {
        "]]>"
    }

    fn render_option(
        &self,
        params: &[&str],
        block: &str,
        _man_name: &str,
    ) -> Result<String, Error> {
        // Render each option term separately; terms are joined with ", "
        // below. The leading `.sp` request that rendering adds for a
        // paragraph is stripped so the terms stay on one line.
        let rendered_options = params
            .iter()
            .map(|param| {
                let r = self.render(param)?;
                Ok(r.trim().trim_start_matches(".sp").to_string())
            })
            .collect::<Result<Vec<_>, Error>>()?;
        let rendered_block = self.render(block)?;
        let rendered_block = rendered_block.trim().trim_start_matches(".sp").trim();
        // .RS = move left margin to right 4.
        // .RE = move margin back one level.
        Ok(format!(
            "\n.sp\n{}\n.RS 4\n{}\n.RE\n",
            rendered_options.join(", "),
            rendered_block
        ))
    }

    fn linkify_man_to_md(&self, name: &str, section: u8) -> Result<String, Error> {
        Ok(format!("`{}`({})", name, section))
    }
}
+
/// A font style that can be active while rendering troff output.
#[derive(Copy, Clone)]
enum Font {
    Bold,
    Italic,
}

impl Font {
    /// Returns the troff font escape sequence for the given font stack.
    ///
    /// Bold and italic anywhere on the stack combine to bold-italic; an
    /// empty stack (or one with neither) maps back to roman (normal).
    fn str_from_stack(font_stack: &[Font]) -> &'static str {
        let mut bold = false;
        let mut italic = false;
        for font in font_stack {
            match font {
                Font::Bold => bold = true,
                Font::Italic => italic = true,
            }
        }
        if bold && italic {
            "\\f(BI" // bold italic
        } else if bold {
            "\\fB" // bold
        } else if italic {
            "\\fI" // italic
        } else {
            "\\fR" // roman (normal)
        }
    }
}
+
/// Renderer that walks markdown events and emits troff man-page source.
struct ManRenderer<'e> {
    /// Rendered troff output accumulated so far.
    output: String,
    /// Iterator of markdown events being consumed.
    parser: EventIter<'e>,
    /// Stack of currently active fonts; see `push_font`/`pop_font`.
    font_stack: Vec<Font>,
}
+
impl<'e> ManRenderer<'e> {
    /// Parses `input` markdown and renders it to troff man-page source,
    /// resolving relative links against `url` if given.
    fn render(input: &str, url: Option<Url>) -> Result<String, Error> {
        let parser = crate::md_parser(input, url);
        // Rough guess at the output size to limit reallocations.
        let output = String::with_capacity(input.len() * 3 / 2);
        let mut mr = ManRenderer {
            parser,
            output,
            font_stack: Vec::new(),
        };
        mr.push_man()?;
        Ok(mr.output)
    }

    /// Consumes all markdown events from the parser, appending the
    /// corresponding troff requests and escaped text to `self.output`.
    fn push_man(&mut self) -> Result<(), Error> {
        // If this is true, this is inside a cdata block used for hiding
        // content from pulldown_cmark.
        let mut in_cdata = false;
        // The current list stack. None if unordered, Some if ordered with the
        // given number as the current index.
        let mut list: Vec<Option<u64>> = Vec::new();
        // Used in some cases where spacing isn't desired.
        let mut suppress_paragraph = false;
        let mut table_cell_index = 0;

        while let Some((event, range)) = self.parser.next() {
            // 1-token lookahead: suppression only applies to the event
            // immediately following the one that requested it.
            let this_suppress_paragraph = suppress_paragraph;
            suppress_paragraph = false;
            match event {
                Event::Start(tag) => {
                    match tag {
                        Tag::Paragraph => {
                            if !this_suppress_paragraph {
                                self.flush();
                                self.output.push_str(".sp\n");
                            }
                        }
                        Tag::Heading(level, ..) => {
                            if level == HeadingLevel::H1 {
                                self.push_top_header()?;
                            } else if level == HeadingLevel::H2 {
                                // Section header
                                let text = header_text(&mut self.parser)?;
                                self.flush();
                                write!(self.output, ".SH \"{}\"\n", text)?;
                                suppress_paragraph = true;
                            } else {
                                // Subsection header
                                let text = header_text(&mut self.parser)?;
                                self.flush();
                                write!(self.output, ".SS \"{}\"\n", text)?;
                                suppress_paragraph = true;
                            }
                        }
                        Tag::BlockQuote => {
                            self.flush();
                            // .RS = move left margin over 3
                            // .ll = shrink line length
                            self.output.push_str(".RS 3\n.ll -5\n.sp\n");
                            suppress_paragraph = true;
                        }
                        Tag::CodeBlock(_kind) => {
                            // space down, indent 4, no-fill mode
                            self.flush();
                            self.output.push_str(".sp\n.RS 4\n.nf\n");
                        }
                        Tag::List(start) => list.push(start),
                        Tag::Item => {
                            // Note: This uses explicit movement instead of .IP
                            // because the spacing on .IP looks weird to me.
                            // space down, indent 4
                            self.flush();
                            self.output.push_str(".sp\n.RS 4\n");
                            match list.last_mut().expect("item must have list start") {
                                // Ordered list.
                                Some(n) => {
                                    // move left 4, output the list index number, move right 1.
                                    write!(self.output, "\\h'-04' {}.\\h'+01'", n)?;
                                    *n += 1;
                                }
                                // Unordered list.
                                None => self.output.push_str("\\h'-04'\\(bu\\h'+02'"),
                            }
                            suppress_paragraph = true;
                        }
                        Tag::FootnoteDefinition(_label) => unimplemented!(),
                        Tag::Table(alignment) => {
                            // Table start
                            // allbox = draw a box around all the cells
                            // tab(:) = Use `:` to separate cell data (instead of tab)
                            // ; = end of options
                            self.output.push_str(
                                "\n.TS\n\
                                 allbox tab(:);\n",
                            );
                            let alignments: Vec<_> = alignment
                                .iter()
                                .map(|a| match a {
                                    Alignment::Left | Alignment::None => "lt",
                                    Alignment::Center => "ct",
                                    Alignment::Right => "rt",
                                })
                                .collect();
                            self.output.push_str(&alignments.join(" "));
                            self.output.push_str(".\n");
                            table_cell_index = 0;
                        }
                        Tag::TableHead => {
                            table_cell_index = 0;
                        }
                        Tag::TableRow => {
                            table_cell_index = 0;
                            self.output.push('\n');
                        }
                        Tag::TableCell => {
                            if table_cell_index != 0 {
                                // Separator between columns.
                                self.output.push(':');
                            }
                            // Start a text block.
                            self.output.push_str("T{\n");
                            table_cell_index += 1
                        }
                        Tag::Emphasis => self.push_font(Font::Italic),
                        Tag::Strong => self.push_font(Font::Bold),
                        // Strikethrough isn't usually supported for TTY.
                        Tag::Strikethrough => self.output.push_str("~~"),
                        Tag::Link(link_type, dest_url, _title) => {
                            if dest_url.starts_with('#') {
                                // In a man page, page-relative anchors don't
                                // have much meaning.
                                continue;
                            }
                            match link_type {
                                LinkType::Autolink | LinkType::Email => {
                                    // The text is a copy of the URL, which is not needed.
                                    match self.parser.next() {
                                        Some((Event::Text(_), _range)) => {}
                                        _ => bail!("expected text after autolink"),
                                    }
                                }
                                LinkType::Inline
                                | LinkType::Reference
                                | LinkType::Collapsed
                                | LinkType::Shortcut => {
                                    self.push_font(Font::Italic);
                                }
                                // This is currently unused. This is only
                                // emitted with a broken link callback, but I
                                // felt it is too annoying to escape `[` in
                                // option descriptions.
                                LinkType::ReferenceUnknown
                                | LinkType::CollapsedUnknown
                                | LinkType::ShortcutUnknown => {
                                    bail!(
                                        "link with missing reference `{}` located at offset {}",
                                        dest_url,
                                        range.start
                                    );
                                }
                            }
                        }
                        Tag::Image(_link_type, _dest_url, _title) => {
                            bail!("images are not currently supported")
                        }
                    }
                }
                Event::End(tag) => {
                    match &tag {
                        Tag::Paragraph => self.flush(),
                        Tag::Heading(..) => {}
                        Tag::BlockQuote => {
                            self.flush();
                            // restore left margin, restore line length
                            self.output.push_str(".br\n.RE\n.ll\n");
                        }
                        Tag::CodeBlock(_kind) => {
                            self.flush();
                            // Restore fill mode, move margin back one level.
                            self.output.push_str(".fi\n.RE\n");
                        }
                        Tag::List(_) => {
                            list.pop();
                        }
                        Tag::Item => {
                            self.flush();
                            // Move margin back one level.
                            self.output.push_str(".RE\n");
                        }
                        Tag::FootnoteDefinition(_label) => {}
                        Tag::Table(_) => {
                            // Table end
                            // I don't know why, but the .sp is needed to provide
                            // space with the following content.
                            self.output.push_str("\n.TE\n.sp\n");
                        }
                        Tag::TableHead => {}
                        Tag::TableRow => {}
                        Tag::TableCell => {
                            // End text block.
                            self.output.push_str("\nT}");
                        }
                        Tag::Emphasis | Tag::Strong => self.pop_font(),
                        Tag::Strikethrough => self.output.push_str("~~"),
                        Tag::Link(link_type, dest_url, _title) => {
                            if dest_url.starts_with('#') {
                                continue;
                            }
                            match link_type {
                                LinkType::Autolink | LinkType::Email => {}
                                LinkType::Inline
                                | LinkType::Reference
                                | LinkType::Collapsed
                                | LinkType::Shortcut => {
                                    self.pop_font();
                                    self.output.push(' ');
                                }
                                _ => {
                                    panic!("unexpected tag {:?}", tag);
                                }
                            }
                            // Emit the destination URL after the link text.
                            write!(self.output, "<{}>", escape(&dest_url)?)?;
                        }
                        Tag::Image(_link_type, _dest_url, _title) => {}
                    }
                }
                Event::Text(t) => {
                    self.output.push_str(&escape(&t)?);
                }
                Event::Code(t) => {
                    // Inline code is rendered in bold.
                    self.push_font(Font::Bold);
                    self.output.push_str(&escape(&t)?);
                    self.pop_font();
                }
                Event::Html(t) => {
                    if t.starts_with("<![CDATA[") {
                        // CDATA is a special marker used for handling options.
                        in_cdata = true;
                    } else if in_cdata {
                        if t.trim().ends_with("]]>") {
                            in_cdata = false;
                        } else if !t.trim().is_empty() {
                            // Content inside CDATA is troff generated by
                            // `render_option`; pass it through unescaped.
                            self.output.push_str(&t);
                        }
                    } else {
                        self.output.push_str(&escape(&t)?);
                    }
                }
                Event::FootnoteReference(_t) => {}
                Event::SoftBreak => self.output.push('\n'),
                Event::HardBreak => {
                    self.flush();
                    self.output.push_str(".br\n");
                }
                Event::Rule => {
                    self.flush();
                    // \l' **length** ' Draw horizontal line (default underscore).
                    // \n(.lu Gets value from register "lu" (current line length)
                    self.output.push_str("\\l'\\n(.lu'\n");
                }
                Event::TaskListMarker(_b) => unimplemented!(),
            }
        }
        Ok(())
    }

    /// Ensures the output ends with a newline so the next troff request
    /// starts at the beginning of a line.
    fn flush(&mut self) {
        if !self.output.ends_with('\n') {
            self.output.push('\n');
        }
    }

    /// Switch to the given font.
    ///
    /// Because the troff sequence `\fP` for switching to the "previous" font
    /// doesn't support nesting, this needs to emulate it here. This is needed
    /// for situations like **hi _there_**.
    fn push_font(&mut self, font: Font) {
        self.font_stack.push(font);
        self.output.push_str(Font::str_from_stack(&self.font_stack));
    }

    /// Pops the most recent font and emits the escape sequence for whatever
    /// fonts remain active on the stack.
    fn pop_font(&mut self) {
        self.font_stack.pop();
        self.output.push_str(Font::str_from_stack(&self.font_stack));
    }

    /// Parse and render the first top-level header of the document.
    fn push_top_header(&mut self) -> Result<(), Error> {
        // This enables the tbl preprocessor for tables.
        // This seems to be enabled by default on every modern system I could
        // find, but it doesn't seem to hurt to enable this.
        self.output.push_str("'\\\" t\n");
        // Extract the name of the man page.
        let text = header_text(&mut self.parser)?;
        let (name, section) = parse_name_and_section(&text)?;
        // .TH = Table header
        // .nh = disable hyphenation
        // .ad l = Left-adjust mode (disable justified).
        // .ss sets sentence_space_size to 0 (prevents double spaces after .
        //     if . is last on the line)
        write!(
            self.output,
            ".TH \"{}\" \"{}\"\n\
             .nh\n\
             .ad l\n\
             .ss \\n[.ss] 0\n",
            escape(&name.to_uppercase())?,
            section
        )?;
        Ok(())
    }
}
+
+fn escape(s: &str) -> Result<String, Error> {
+ // Note: Possible source on output escape sequences: https://man7.org/linux/man-pages/man7/groff_char.7.html.
+ // Otherwise, use generic escaping in the form `\[u1EE7]` or `\[u1F994]`.
+
+ let mut replaced = s
+ .replace('\\', "\\(rs")
+ .replace('-', "\\-")
+ .replace('\u{00A0}', "\\ ") // non-breaking space (non-stretchable)
+ .replace('–', "\\[en]") // \u{2013} en-dash
+ .replace('—', "\\[em]") // \u{2014} em-dash
+ .replace('‘', "\\[oq]") // \u{2018} left single quote
+ .replace('’', "\\[cq]") // \u{2019} right single quote or apostrophe
+ .replace('“', "\\[lq]") // \u{201C} left double quote
+ .replace('”', "\\[rq]") // \u{201D} right double quote
+ .replace('…', "\\[u2026]") // \u{2026} ellipsis
+ .replace('│', "|") // \u{2502} box drawing light vertical (could use \[br])
+ .replace('├', "|") // \u{251C} box drawings light vertical and right
+ .replace('└', "`") // \u{2514} box drawings light up and right
+ .replace('─', "\\-") // \u{2500} box drawing light horizontal
+ ;
+ if replaced.starts_with('.') {
+ replaced = format!("\\&.{}", &replaced[1..]);
+ }
+
+ if let Some(ch) = replaced.chars().find(|ch| {
+ !matches!(ch, '\n' | ' ' | '!'..='/' | '0'..='9'
+ | ':'..='@' | 'A'..='Z' | '['..='`' | 'a'..='z' | '{'..='~')
+ }) {
+ bail!(
+ "character {:?} is not allowed (update the translation table if needed)",
+ ch
+ );
+ }
+ Ok(replaced)
+}
diff --git a/crates/mdman/src/format/md.rs b/crates/mdman/src/format/md.rs
new file mode 100644
index 0000000..0e1c498
--- /dev/null
+++ b/crates/mdman/src/format/md.rs
@@ -0,0 +1,112 @@
+//! Markdown formatter.
+
+use crate::util::unwrap;
+use crate::ManMap;
+use anyhow::{bail, format_err, Error};
+use std::fmt::Write;
+
/// Formatter that emits markdown (with embedded HTML for option lists).
pub struct MdFormatter {
    /// Mapping of (man page name, section) to a URL, used to link
    /// `{{man name section}}` references.
    man_map: ManMap,
}
+
impl MdFormatter {
    /// Creates a new `MdFormatter` with the given man-page link map.
    pub fn new(man_map: ManMap) -> MdFormatter {
        MdFormatter { man_map }
    }
}
+
impl MdFormatter {
    /// Renders the given markdown `input` to HTML via pulldown_cmark.
    fn render_html(&self, input: &str) -> Result<String, Error> {
        let parser = crate::md_parser(input, None);
        // Rough guess at the output size to limit reallocations.
        let mut html_output: String = String::with_capacity(input.len() * 3 / 2);
        pulldown_cmark::html::push_html(&mut html_output, parser.map(|(e, _r)| e));
        Ok(html_output)
    }
}
+
impl super::Formatter for MdFormatter {
    fn render(&self, input: &str) -> Result<String, Error> {
        // Markdown output is essentially a pass-through; just normalize
        // line endings.
        Ok(input.replace("\r\n", "\n"))
    }

    fn render_options_start(&self) -> &'static str {
        "<dl>"
    }

    fn render_options_end(&self) -> &'static str {
        "</dl>"
    }

    fn render_option(&self, params: &[&str], block: &str, man_name: &str) -> Result<String, Error> {
        let mut result = String::new();
        // Strips the <p>...</p> wrapper that HTML rendering adds.
        fn unwrap_p(t: &str) -> &str {
            unwrap(t, "<p>", "</p>")
        }

        for param in params {
            let rendered = self.render_html(param)?;
            let no_p = unwrap_p(&rendered);
            // split out first term to use as the id.
            let first = no_p
                .split_whitespace()
                .next()
                .ok_or_else(|| format_err!("did not expect option `{}` to be empty", param))?;
            let no_tags = trim_tags(first);
            if no_tags.is_empty() {
                bail!("unexpected empty option with no tags `{}`", param);
            }
            // The id includes the man page name so anchors stay unique when
            // multiple pages are combined.
            let id = format!("option-{}-{}", man_name, no_tags);
            write!(
                result,
                "<dt class=\"option-term\" id=\"{ID}\">\
                 <a class=\"option-anchor\" href=\"#{ID}\"></a>{OPTION}</dt>\n",
                ID = id,
                OPTION = no_p
            )?;
        }
        let rendered_block = self.render_html(block)?;
        write!(
            result,
            "<dd class=\"option-desc\">{}</dd>\n",
            unwrap_p(&rendered_block)
        )?;
        Ok(result)
    }

    fn linkify_man_to_md(&self, name: &str, section: u8) -> Result<String, Error> {
        // Use the URL from the man map when one was provided; otherwise
        // fall back to a relative link to `<name>.html`.
        let s = match self.man_map.get(&(name.to_string(), section)) {
            Some(link) => format!("[{}({})]({})", name, section, link),
            None => format!("[{}({})]({}.html)", name, section, name),
        };
        Ok(s)
    }
}
+
/// Removes all HTML tags and character references from the given string.
///
/// This is a hack: everything between `<`…`>` or `&`…`;` is dropped, and a
/// nested opener panics.
fn trim_tags(s: &str) -> String {
    let mut result = String::new();
    let mut in_tag = false;
    let mut in_char_ref = false;
    for ch in s.chars() {
        match ch {
            '<' if in_tag => panic!("unexpected nested tag"),
            '&' if in_char_ref => panic!("unexpected nested char ref"),
            '<' => in_tag = true,
            '&' => in_char_ref = true,
            '>' if in_tag => in_tag = false,
            ';' if in_char_ref => in_char_ref = false,
            _ => {
                if !in_tag && !in_char_ref {
                    result.push(ch);
                }
            }
        }
    }
    result
}
diff --git a/crates/mdman/src/format/text.rs b/crates/mdman/src/format/text.rs
new file mode 100644
index 0000000..ae07985
--- /dev/null
+++ b/crates/mdman/src/format/text.rs
@@ -0,0 +1,605 @@
+//! Text formatter.
+
+use crate::util::{header_text, unwrap};
+use crate::EventIter;
+use anyhow::{bail, Error};
+use pulldown_cmark::{Alignment, Event, HeadingLevel, LinkType, Tag};
+use std::fmt::Write;
+use std::mem;
+use url::Url;
+
/// Formatter that emits plain, word-wrapped text.
pub struct TextFormatter {
    /// The base URL used for relative URLs.
    url: Option<Url>,
}
+
impl TextFormatter {
    /// Creates a new `TextFormatter` with an optional base URL for
    /// relative URLs.
    pub fn new(url: Option<Url>) -> TextFormatter {
        TextFormatter { url }
    }
}
+
impl super::Formatter for TextFormatter {
    fn render(&self, input: &str) -> Result<String, Error> {
        TextRenderer::render(input, self.url.clone(), 0)
    }

    fn render_options_start(&self) -> &'static str {
        // Tell pulldown_cmark to ignore this.
        // This will be stripped out later.
        "<![CDATA["
    }

    fn render_options_end(&self) -> &'static str {
        "]]>"
    }

    fn render_option(
        &self,
        params: &[&str],
        block: &str,
        _man_name: &str,
    ) -> Result<String, Error> {
        // Render each option term separately; terms are joined with ", "
        // below.
        let rendered_options = params
            .iter()
            .map(|param| TextRenderer::render(param, self.url.clone(), 0))
            .collect::<Result<Vec<_>, Error>>()?;
        let trimmed: Vec<_> = rendered_options.iter().map(|o| o.trim()).collect();
        // Wrap in HTML tags, they will be stripped out during rendering.
        Ok(format!(
            "<dt>{}</dt>\n<dd>{}</dd>\n<br>\n",
            trimmed.join(", "),
            block
        ))
    }

    fn linkify_man_to_md(&self, name: &str, section: u8) -> Result<String, Error> {
        Ok(format!("`{}`({})", name, section))
    }
}
+
/// Renderer that walks markdown events and emits word-wrapped plain text.
struct TextRenderer<'e> {
    /// Rendered text output accumulated so far.
    output: String,
    /// Number of spaces of leading indentation for new lines.
    indent: usize,
    /// The current line being written. Once a line break is encountered (such
    /// as starting a new paragraph), this will be written to `output` via
    /// `flush`.
    line: String,
    /// The current word being written. Once a break is encountered (such as a
    /// space) this will be written to `line` via `flush_word`.
    word: String,
    /// Iterator of markdown events being consumed.
    parser: EventIter<'e>,
    /// The base URL used for relative URLs.
    url: Option<Url>,
    /// State for the table currently being collected, if any.
    table: Table,
}
+
impl<'e> TextRenderer<'e> {
    /// Parses `input` markdown and renders it as wrapped plain text,
    /// starting at the given indentation level.
    fn render(input: &str, url: Option<Url>, indent: usize) -> Result<String, Error> {
        let parser = crate::md_parser(input, url.clone());
        // Rough guess at the output size to limit reallocations.
        let output = String::with_capacity(input.len() * 3 / 2);
        let mut mr = TextRenderer {
            output,
            indent,
            line: String::new(),
            word: String::new(),
            parser,
            url,
            table: Table::new(),
        };
        mr.push_md()?;
        Ok(mr.output)
    }

    /// Consumes all markdown events from the parser, appending wrapped
    /// text to `self.output`.
    fn push_md(&mut self) -> Result<(), Error> {
        // If this is true, this is inside a cdata block used for hiding
        // content from pulldown_cmark.
        let mut in_cdata = false;
        // The current list stack. None if unordered, Some if ordered with the
        // given number as the current index.
        let mut list: Vec<Option<u64>> = Vec::new();
        // Used in some cases where spacing isn't desired.
        let mut suppress_paragraph = false;
        // Whether or not word-wrapping is enabled.
        let mut wrap_text = true;

        while let Some((event, range)) = self.parser.next() {
            let this_suppress_paragraph = suppress_paragraph;
            // Always reset suppression, even if the next event isn't a
            // paragraph. This is in essence, a 1-token lookahead where the
            // suppression is only enabled if the next event is a paragraph.
            suppress_paragraph = false;
            match event {
                Event::Start(tag) => {
                    match tag {
                        Tag::Paragraph => {
                            if !this_suppress_paragraph {
                                self.flush();
                            }
                        }
                        Tag::Heading(level, ..) => {
                            self.flush();
                            if level == HeadingLevel::H1 {
                                let text = header_text(&mut self.parser)?;
                                self.push_to_line(&text.to_uppercase());
                                self.hard_break();
                                self.hard_break();
                            } else if level == HeadingLevel::H2 {
                                let text = header_text(&mut self.parser)?;
                                self.push_to_line(&text.to_uppercase());
                                self.flush();
                                self.indent = 7;
                            } else {
                                // Deeper headings get progressively more
                                // indentation.
                                let text = header_text(&mut self.parser)?;
                                self.push_indent((level as usize - 2) * 3);
                                self.push_to_line(&text);
                                self.flush();
                                self.indent = (level as usize - 1) * 3 + 1;
                            }
                        }
                        Tag::BlockQuote => {
                            self.indent += 3;
                        }
                        Tag::CodeBlock(_kind) => {
                            self.flush();
                            // Code blocks are emitted verbatim, not wrapped.
                            wrap_text = false;
                            self.indent += 4;
                        }
                        Tag::List(start) => list.push(start),
                        Tag::Item => {
                            self.flush();
                            match list.last_mut().expect("item must have list start") {
                                // Ordered list.
                                Some(n) => {
                                    self.push_indent(self.indent);
                                    write!(self.line, "{}.", n)?;
                                    *n += 1;
                                }
                                // Unordered list.
                                None => {
                                    self.push_indent(self.indent);
                                    self.push_to_line("o ")
                                }
                            }
                            self.indent += 3;
                            suppress_paragraph = true;
                        }
                        Tag::FootnoteDefinition(_label) => unimplemented!(),
                        Tag::Table(alignment) => {
                            assert!(self.table.alignment.is_empty());
                            self.flush();
                            self.table.alignment.extend(alignment);
                            // `Table::process` consumes events up through the
                            // table end tag.
                            let table = self.table.process(&mut self.parser, self.indent)?;
                            self.output.push_str(&table);
                            self.hard_break();
                            self.table = Table::new();
                        }
                        Tag::TableHead | Tag::TableRow | Tag::TableCell => {
                            bail!("unexpected table element")
                        }
                        Tag::Emphasis => {}
                        Tag::Strong => {}
                        // Strikethrough isn't usually supported for TTY.
                        Tag::Strikethrough => self.word.push_str("~~"),
                        Tag::Link(link_type, dest_url, _title) => {
                            if dest_url.starts_with('#') {
                                // In a man page, page-relative anchors don't
                                // have much meaning.
                                continue;
                            }
                            match link_type {
                                LinkType::Autolink | LinkType::Email => {
                                    // The text is a copy of the URL, which is not needed.
                                    match self.parser.next() {
                                        Some((Event::Text(_), _range)) => {}
                                        _ => bail!("expected text after autolink"),
                                    }
                                }
                                LinkType::Inline
                                | LinkType::Reference
                                | LinkType::Collapsed
                                | LinkType::Shortcut => {}
                                // This is currently unused. This is only
                                // emitted with a broken link callback, but I
                                // felt it is too annoying to escape `[` in
                                // option descriptions.
                                LinkType::ReferenceUnknown
                                | LinkType::CollapsedUnknown
                                | LinkType::ShortcutUnknown => {
                                    bail!(
                                        "link with missing reference `{}` located at offset {}",
                                        dest_url,
                                        range.start
                                    );
                                }
                            }
                        }
                        Tag::Image(_link_type, _dest_url, _title) => {
                            bail!("images are not currently supported")
                        }
                    }
                }
                Event::End(tag) => match &tag {
                    Tag::Paragraph => {
                        self.flush();
                        self.hard_break();
                    }
                    Tag::Heading(..) => {}
                    Tag::BlockQuote => {
                        self.indent -= 3;
                    }
                    Tag::CodeBlock(_kind) => {
                        self.hard_break();
                        wrap_text = true;
                        self.indent -= 4;
                    }
                    Tag::List(_) => {
                        list.pop();
                    }
                    Tag::Item => {
                        self.flush();
                        self.indent -= 3;
                        self.hard_break();
                    }
                    Tag::FootnoteDefinition(_label) => {}
                    Tag::Table(_) => {}
                    Tag::TableHead => {}
                    Tag::TableRow => {}
                    Tag::TableCell => {}
                    Tag::Emphasis => {}
                    Tag::Strong => {}
                    Tag::Strikethrough => self.word.push_str("~~"),
                    Tag::Link(link_type, dest_url, _title) => {
                        if dest_url.starts_with('#') {
                            continue;
                        }
                        match link_type {
                            LinkType::Autolink | LinkType::Email => {}
                            LinkType::Inline
                            | LinkType::Reference
                            | LinkType::Collapsed
                            | LinkType::Shortcut => self.flush_word(),
                            _ => {
                                panic!("unexpected tag {:?}", tag);
                            }
                        }
                        // Emit the destination URL after the link text.
                        self.flush_word();
                        write!(self.word, "<{}>", dest_url)?;
                    }
                    Tag::Image(_link_type, _dest_url, _title) => {}
                },
                Event::Text(t) | Event::Code(t) => {
                    if wrap_text {
                        let chunks = split_chunks(&t);
                        for chunk in chunks {
                            if chunk == " " {
                                self.flush_word();
                            } else {
                                self.word.push_str(chunk);
                            }
                        }
                    } else {
                        // No wrapping: emit each line verbatim with the
                        // current indentation.
                        for line in t.lines() {
                            self.push_indent(self.indent);
                            self.push_to_line(line);
                            self.flush();
                        }
                    }
                }
                Event::Html(t) => {
                    if t.starts_with("<![CDATA[") {
                        // CDATA is a special marker used for handling options.
                        in_cdata = true;
                        self.flush();
                    } else if in_cdata {
                        if t.trim().ends_with("]]>") {
                            in_cdata = false;
                        } else {
                            let trimmed = t.trim();
                            if trimmed.is_empty() {
                                continue;
                            }
                            if trimmed == "<br>" {
                                self.hard_break();
                            } else if trimmed.starts_with("<dt>") {
                                // Option term produced by `render_option`.
                                let opts = unwrap(trimmed, "<dt>", "</dt>");
                                self.push_indent(self.indent);
                                self.push_to_line(opts);
                                self.flush();
                            } else if trimmed.starts_with("<dd>") {
                                // Option description: collect everything up to
                                // </dd> and render it indented under the term.
                                let mut def = String::new();
                                while let Some((Event::Html(t), _range)) = self.parser.next() {
                                    if t.starts_with("</dd>") {
                                        break;
                                    }
                                    def.push_str(&t);
                                }
                                let rendered =
                                    TextRenderer::render(&def, self.url.clone(), self.indent + 4)?;
                                self.push_to_line(rendered.trim_end());
                                self.flush();
                            } else {
                                self.push_to_line(&t);
                                self.flush();
                            }
                        }
                    } else {
                        self.push_to_line(&t);
                        self.flush();
                    }
                }
                Event::FootnoteReference(_t) => {}
                Event::SoftBreak => self.flush_word(),
                Event::HardBreak => self.flush(),
                Event::Rule => {
                    self.flush();
                    self.push_indent(self.indent);
                    self.push_to_line(&"_".repeat(79 - self.indent * 2));
                    self.flush();
                }
                Event::TaskListMarker(_b) => unimplemented!(),
            }
        }
        Ok(())
    }

    /// Writes any pending word and the current line out to `output`.
    fn flush(&mut self) {
        self.flush_word();
        if !self.line.is_empty() {
            self.output.push_str(&self.line);
            self.output.push('\n');
            self.line.clear();
        }
    }

    /// Flushes, then ensures a blank line separates the previous content.
    fn hard_break(&mut self) {
        self.flush();
        if !self.output.ends_with("\n\n") {
            self.output.push('\n');
        }
    }

    /// Moves the pending word onto the current line, starting a fresh
    /// (indented) line first if the word would push past 79 columns.
    fn flush_word(&mut self) {
        if self.word.is_empty() {
            return;
        }
        if self.line.len() + self.word.len() >= 79 {
            self.output.push_str(&self.line);
            self.output.push('\n');
            self.line.clear();
        }
        if self.line.is_empty() {
            self.push_indent(self.indent);
            self.line.push_str(&self.word);
        } else {
            self.line.push(' ');
            self.line.push_str(&self.word);
        }
        self.word.clear();
    }

    /// Appends `indent` spaces to the current line.
    fn push_indent(&mut self, indent: usize) {
        for _ in 0..indent {
            self.line.push(' ');
        }
    }

    /// Flushes the pending word and appends `text` directly to the line.
    fn push_to_line(&mut self, text: &str) {
        self.flush_word();
        self.line.push_str(text);
    }
}
+
/// Splits the text on spaces.
///
/// Consecutive spaces are collapsed to a single `" "` element, which is
/// included in the result between the surrounding words. Only the ASCII
/// space character is treated as a separator.
fn split_chunks(text: &str) -> Vec<&str> {
    let mut chunks = Vec::new();
    // Byte offset where the current word started, if inside a word.
    let mut word_start: Option<usize> = None;
    // Whether the previous character was a space (for collapsing runs).
    let mut prev_space = false;
    for (i, ch) in text.char_indices() {
        if ch == ' ' {
            if let Some(start) = word_start.take() {
                chunks.push(&text[start..i]);
            }
            if !prev_space {
                chunks.push(" ");
            }
            prev_space = true;
        } else {
            if word_start.is_none() {
                word_start = Some(i);
            }
            prev_space = false;
        }
    }
    // Trailing word, if the text did not end with a space.
    if let Some(start) = word_start {
        chunks.push(&text[start..]);
    }
    chunks
}
+
/// Accumulated state for rendering a plain-text table.
struct Table {
    /// Column alignments, copied from the markdown table start tag.
    alignment: Vec<Alignment>,
    /// Completed rows; each row is a vec of cell strings.
    rows: Vec<Vec<String>>,
    /// Cells of the row currently being collected.
    row: Vec<String>,
    /// Text of the cell currently being collected.
    cell: String,
}
+
+impl Table {
+ fn new() -> Table {
+ Table {
+ alignment: Vec::new(),
+ rows: Vec::new(),
+ row: Vec::new(),
+ cell: String::new(),
+ }
+ }
+
+ /// Processes table events and generates a text table.
+ fn process(&mut self, parser: &mut EventIter<'_>, indent: usize) -> Result<String, Error> {
+ while let Some((event, _range)) = parser.next() {
+ match event {
+ Event::Start(tag) => match tag {
+ Tag::TableHead
+ | Tag::TableRow
+ | Tag::TableCell
+ | Tag::Emphasis
+ | Tag::Strong => {}
+ Tag::Strikethrough => self.cell.push_str("~~"),
+ // Links not yet supported, they usually won't fit.
+ Tag::Link(_, _, _) => {}
+ _ => bail!("unexpected tag in table: {:?}", tag),
+ },
+ Event::End(tag) => match tag {
+ Tag::Table(_) => return self.render(indent),
+ Tag::TableCell => {
+ let cell = mem::replace(&mut self.cell, String::new());
+ self.row.push(cell);
+ }
+ Tag::TableHead | Tag::TableRow => {
+ let row = mem::replace(&mut self.row, Vec::new());
+ self.rows.push(row);
+ }
+ Tag::Strikethrough => self.cell.push_str("~~"),
+ _ => {}
+ },
+ Event::Text(t) | Event::Code(t) => {
+ self.cell.push_str(&t);
+ }
+ Event::Html(t) => bail!("html unsupported in tables: {:?}", t),
+ _ => bail!("unexpected event in table: {:?}", event),
+ }
+ }
+ bail!("table end not reached");
+ }
+
+ fn render(&self, indent: usize) -> Result<String, Error> {
+ // This is an extremely primitive layout routine.
+ // First compute the potential maximum width of each cell.
+ // 2 for 1 space margin on left and right.
+ let width_acc = vec![2; self.alignment.len()];
+ let mut col_widths = self
+ .rows
+ .iter()
+ .map(|row| row.iter().map(|cell| cell.len()))
+ .fold(width_acc, |mut acc, row| {
+ acc.iter_mut()
+ .zip(row)
+ // +3 for left/right margin and | symbol
+ .for_each(|(a, b)| *a = (*a).max(b + 3));
+ acc
+ });
+ // Shrink each column until it fits the total width, proportional to
+ // the columns total percent width.
+ let max_width = 78 - indent;
+ // Include total len for | characters, and +1 for final |.
+ let total_width = col_widths.iter().sum::<usize>() + col_widths.len() + 1;
+ if total_width > max_width {
+ let to_shrink = total_width - max_width;
+ // Compute percentage widths, and shrink each column based on its
+ // total percentage.
+ for width in &mut col_widths {
+ let percent = *width as f64 / total_width as f64;
+ *width -= (to_shrink as f64 * percent).ceil() as usize;
+ }
+ }
+ // Start rendering.
+ let mut result = String::new();
+
+ // Draw the horizontal line separating each row.
+ let mut row_line = String::new();
+ row_line.push_str(&" ".repeat(indent));
+ row_line.push('+');
+ let lines = col_widths
+ .iter()
+ .map(|width| "-".repeat(*width))
+ .collect::<Vec<_>>();
+ row_line.push_str(&lines.join("+"));
+ row_line.push('+');
+ row_line.push('\n');
+
+ // Draw top of the table.
+ result.push_str(&row_line);
+ // Draw each row.
+ for row in &self.rows {
+ // Word-wrap and fill each column as needed.
+ let filled = fill_row(row, &col_widths, &self.alignment);
+ // Need to transpose the cells across rows for cells that span
+ // multiple rows.
+ let height = filled.iter().map(|c| c.len()).max().unwrap();
+ for row_i in 0..height {
+ result.push_str(&" ".repeat(indent));
+ result.push('|');
+ for filled_row in &filled {
+ let cell = &filled_row[row_i];
+ result.push_str(cell);
+ result.push('|');
+ }
+ result.push('\n');
+ }
+ result.push_str(&row_line);
+ }
+ Ok(result)
+ }
+}
+
/// Formats a row, filling cells with spaces and word-wrapping text.
///
/// Returns a vec of cells, where each cell is split into multiple lines.
fn fill_row(row: &[String], col_widths: &[usize], alignment: &[Alignment]) -> Vec<Vec<String>> {
    // `- 2` undoes the margin that was folded into the column width
    // (see `Table::render`).
    let mut cell_lines = row
        .iter()
        .zip(col_widths)
        .zip(alignment)
        .map(|((cell, width), alignment)| fill_cell(cell, *width - 2, *alignment))
        .collect::<Vec<_>>();
    // Fill each cell to match the maximum vertical height of the tallest cell.
    // NOTE(review): the `unwrap` assumes every row has at least one cell —
    // confirm callers never pass an empty row.
    let max_lines = cell_lines.iter().map(|cell| cell.len()).max().unwrap();
    for (cell, width) in cell_lines.iter_mut().zip(col_widths) {
        if cell.len() < max_lines {
            cell.extend(std::iter::repeat(" ".repeat(*width)).take(max_lines - cell.len()));
        }
    }
    cell_lines
}
+
+/// Formats a cell. Word-wraps based on width, and adjusts based on alignment.
+///
+/// Returns a vec of lines for the cell.
+fn fill_cell(text: &str, width: usize, alignment: Alignment) -> Vec<String> {
+ let fill_width = |text: &str| match alignment {
+ Alignment::None | Alignment::Left => format!(" {:<width$} ", text, width = width),
+ Alignment::Center => format!(" {:^width$} ", text, width = width),
+ Alignment::Right => format!(" {:>width$} ", text, width = width),
+ };
+ if text.len() < width {
+ // No wrapping necessary, just format.
+ vec![fill_width(text)]
+ } else {
+ // Word-wrap the cell.
+ let mut result = Vec::new();
+ let mut line = String::new();
+ for word in text.split_whitespace() {
+ if line.len() + word.len() >= width {
+ // todo: word.len() > width
+ result.push(fill_width(&line));
+ line.clear();
+ }
+ if line.is_empty() {
+ line.push_str(word);
+ } else {
+ line.push(' ');
+ line.push_str(&word);
+ }
+ }
+ if !line.is_empty() {
+ result.push(fill_width(&line));
+ }
+
+ result
+ }
+}
diff --git a/crates/mdman/src/hbs.rs b/crates/mdman/src/hbs.rs
new file mode 100644
index 0000000..81ad7ee
--- /dev/null
+++ b/crates/mdman/src/hbs.rs
@@ -0,0 +1,215 @@
+//! Handlebars template processing.
+
+use crate::format::Formatter;
+use anyhow::Error;
+use handlebars::{
+ handlebars_helper, Context, Decorator, Handlebars, Helper, HelperDef, HelperResult, Output,
+ RenderContext, RenderError, Renderable,
+};
+use std::collections::HashMap;
+use std::path::Path;
+
+type FormatterRef<'a> = &'a (dyn Formatter + Send + Sync);
+
+/// Processes the handlebars template at the given file.
+pub fn expand(file: &Path, formatter: FormatterRef) -> Result<String, Error> {
+ let mut handlebars = Handlebars::new();
+ handlebars.set_strict_mode(true);
+ handlebars.register_helper("lower", Box::new(lower));
+ handlebars.register_helper("options", Box::new(OptionsHelper { formatter }));
+ handlebars.register_helper("option", Box::new(OptionHelper { formatter }));
+ handlebars.register_helper("man", Box::new(ManLinkHelper { formatter }));
+ handlebars.register_decorator("set", Box::new(set_decorator));
+ handlebars.register_template_file("template", file)?;
+ let includes = file.parent().unwrap().join("includes");
+ handlebars.register_templates_directory(".md", includes)?;
+ let man_name = file
+ .file_stem()
+ .expect("expected filename")
+ .to_str()
+ .expect("utf8 filename")
+ .to_string();
+ let data = HashMap::from([("man_name", man_name)]);
+ let expanded = handlebars.render("template", &data)?;
+ Ok(expanded)
+}
+
+/// Helper for `{{#options}}` block.
+struct OptionsHelper<'a> {
+ formatter: FormatterRef<'a>,
+}
+
+impl HelperDef for OptionsHelper<'_> {
+ fn call<'reg: 'rc, 'rc>(
+ &self,
+ h: &Helper<'reg, 'rc>,
+ r: &'reg Handlebars<'reg>,
+ ctx: &'rc Context,
+ rc: &mut RenderContext<'reg, 'rc>,
+ out: &mut dyn Output,
+ ) -> HelperResult {
+ if in_options(rc) {
+ return Err(RenderError::new("options blocks cannot be nested"));
+ }
+ // Prevent nested {{#options}}.
+ set_in_context(rc, "__MDMAN_IN_OPTIONS", serde_json::Value::Bool(true));
+ let s = self.formatter.render_options_start();
+ out.write(&s)?;
+ let t = match h.template() {
+ Some(t) => t,
+ None => return Err(RenderError::new("options block must not be empty")),
+ };
+ let block = t.renders(r, ctx, rc)?;
+ out.write(&block)?;
+
+ let s = self.formatter.render_options_end();
+ out.write(&s)?;
+ remove_from_context(rc, "__MDMAN_IN_OPTIONS");
+ Ok(())
+ }
+}
+
+/// Whether or not the context is currently inside a `{{#options}}` block.
+fn in_options(rc: &RenderContext<'_, '_>) -> bool {
+ rc.context()
+ .map_or(false, |ctx| ctx.data().get("__MDMAN_IN_OPTIONS").is_some())
+}
+
+/// Helper for `{{#option}}` block.
+struct OptionHelper<'a> {
+ formatter: FormatterRef<'a>,
+}
+
+impl HelperDef for OptionHelper<'_> {
+ fn call<'reg: 'rc, 'rc>(
+ &self,
+ h: &Helper<'reg, 'rc>,
+ r: &'reg Handlebars<'reg>,
+ ctx: &'rc Context,
+ rc: &mut RenderContext<'reg, 'rc>,
+ out: &mut dyn Output,
+ ) -> HelperResult {
+ if !in_options(rc) {
+ return Err(RenderError::new("option must be in options block"));
+ }
+ let params = h.params();
+ if params.is_empty() {
+ return Err(RenderError::new(
+ "option block must have at least one param",
+ ));
+ }
+ // Convert params to strings.
+ let params = params
+ .iter()
+ .map(|param| {
+ param
+ .value()
+ .as_str()
+ .ok_or_else(|| RenderError::new("option params must be strings"))
+ })
+ .collect::<Result<Vec<&str>, RenderError>>()?;
+ let t = match h.template() {
+ Some(t) => t,
+ None => return Err(RenderError::new("option block must not be empty")),
+ };
+ // Render the block.
+ let block = t.renders(r, ctx, rc)?;
+
+ // Get the name of this page.
+ let man_name = ctx
+ .data()
+ .get("man_name")
+ .expect("expected man_name in context")
+ .as_str()
+ .expect("expect man_name str");
+
+ // Ask the formatter to convert this option to its format.
+ let option = self
+ .formatter
+ .render_option(&params, &block, man_name)
+ .map_err(|e| RenderError::new(format!("option render failed: {}", e)))?;
+ out.write(&option)?;
+ Ok(())
+ }
+}
+
+/// Helper for `{{man name section}}` expression.
+struct ManLinkHelper<'a> {
+ formatter: FormatterRef<'a>,
+}
+
+impl HelperDef for ManLinkHelper<'_> {
+ fn call<'reg: 'rc, 'rc>(
+ &self,
+ h: &Helper<'reg, 'rc>,
+ _r: &'reg Handlebars<'reg>,
+ _ctx: &'rc Context,
+ _rc: &mut RenderContext<'reg, 'rc>,
+ out: &mut dyn Output,
+ ) -> HelperResult {
+ let params = h.params();
+ if params.len() != 2 {
+ return Err(RenderError::new("{{man}} must have two arguments"));
+ }
+ let name = params[0]
+ .value()
+ .as_str()
+ .ok_or_else(|| RenderError::new("man link name must be a string"))?;
+ let section = params[1]
+ .value()
+ .as_u64()
+ .ok_or_else(|| RenderError::new("man link section must be an integer"))?;
+ let section =
+ u8::try_from(section).map_err(|_e| RenderError::new("section number too large"))?;
+ let link = self
+ .formatter
+ .linkify_man_to_md(name, section)
+ .map_err(|e| RenderError::new(format!("failed to linkify man: {}", e)))?;
+ out.write(&link)?;
+ Ok(())
+ }
+}
+
+/// `{{*set var=value}}` decorator.
+///
+/// This sets a variable to a value within the template context.
+fn set_decorator(
+ d: &Decorator,
+ _: &Handlebars,
+ _ctx: &Context,
+ rc: &mut RenderContext,
+) -> Result<(), RenderError> {
+ let data_to_set = d.hash();
+ for (k, v) in data_to_set {
+ set_in_context(rc, k, v.value().clone());
+ }
+ Ok(())
+}
+
+/// Sets a variable to a value within the context.
+fn set_in_context(rc: &mut RenderContext, key: &str, value: serde_json::Value) {
+ let mut ctx = match rc.context() {
+ Some(c) => (*c).clone(),
+ None => Context::wraps(serde_json::Value::Object(serde_json::Map::new())).unwrap(),
+ };
+ if let serde_json::Value::Object(m) = ctx.data_mut() {
+ m.insert(key.to_string(), value);
+ rc.set_context(ctx);
+ } else {
+ panic!("expected object in context");
+ }
+}
+
+/// Removes a variable from the context.
+fn remove_from_context(rc: &mut RenderContext, key: &str) {
+ let ctx = rc.context().expect("cannot remove from null context");
+ let mut ctx = (*ctx).clone();
+ if let serde_json::Value::Object(m) = ctx.data_mut() {
+ m.remove(key);
+ rc.set_context(ctx);
+ } else {
+ panic!("expected object in context");
+ }
+}
+
+handlebars_helper!(lower: |s: str| s.to_lowercase());
diff --git a/crates/mdman/src/lib.rs b/crates/mdman/src/lib.rs
new file mode 100644
index 0000000..01c3c8d
--- /dev/null
+++ b/crates/mdman/src/lib.rs
@@ -0,0 +1,122 @@
+//! mdman markdown to man converter.
+
+use anyhow::{bail, Context, Error};
+use pulldown_cmark::{CowStr, Event, LinkType, Options, Parser, Tag};
+use std::collections::HashMap;
+use std::fs;
+use std::io::{self, BufRead};
+use std::ops::Range;
+use std::path::Path;
+use url::Url;
+
+mod format;
+mod hbs;
+mod util;
+
+use format::Formatter;
+
+/// Mapping of `(name, section)` of a man page to a URL.
+pub type ManMap = HashMap<(String, u8), String>;
+
+/// A man section.
+pub type Section = u8;
+
+/// The output formats supported by mdman.
+#[derive(Copy, Clone)]
+pub enum Format {
+ Man,
+ Md,
+ Text,
+}
+
+impl Format {
+ /// The filename extension for the format.
+ pub fn extension(&self, section: Section) -> String {
+ match self {
+ Format::Man => section.to_string(),
+ Format::Md => "md".to_string(),
+ Format::Text => "txt".to_string(),
+ }
+ }
+}
+
+/// Converts the handlebars markdown file at the given path into the given
+/// format, returning the translated result.
+pub fn convert(
+ file: &Path,
+ format: Format,
+ url: Option<Url>,
+ man_map: ManMap,
+) -> Result<String, Error> {
+ let formatter: Box<dyn Formatter + Send + Sync> = match format {
+ Format::Man => Box::new(format::man::ManFormatter::new(url)),
+ Format::Md => Box::new(format::md::MdFormatter::new(man_map)),
+ Format::Text => Box::new(format::text::TextFormatter::new(url)),
+ };
+ let expanded = hbs::expand(file, &*formatter)?;
+ // pulldown-cmark can behave a little differently with Windows newlines,
+ // just normalize it.
+ let expanded = expanded.replace("\r\n", "\n");
+ formatter.render(&expanded)
+}
+
+/// Pulldown-cmark iterator yielding an `(event, range)` tuple.
+type EventIter<'a> = Box<dyn Iterator<Item = (Event<'a>, Range<usize>)> + 'a>;
+
+/// Creates a new markdown parser with the given input.
+pub(crate) fn md_parser(input: &str, url: Option<Url>) -> EventIter {
+ let mut options = Options::empty();
+ options.insert(Options::ENABLE_TABLES);
+ options.insert(Options::ENABLE_FOOTNOTES);
+ options.insert(Options::ENABLE_STRIKETHROUGH);
+ options.insert(Options::ENABLE_SMART_PUNCTUATION);
+ let parser = Parser::new_ext(input, options);
+ let parser = parser.into_offset_iter();
+ // Translate all links to include the base url.
+ let parser = parser.map(move |(event, range)| match event {
+ Event::Start(Tag::Link(lt, dest_url, title)) if !matches!(lt, LinkType::Email) => (
+ Event::Start(Tag::Link(lt, join_url(url.as_ref(), dest_url), title)),
+ range,
+ ),
+ Event::End(Tag::Link(lt, dest_url, title)) if !matches!(lt, LinkType::Email) => (
+ Event::End(Tag::Link(lt, join_url(url.as_ref(), dest_url), title)),
+ range,
+ ),
+ _ => (event, range),
+ });
+ Box::new(parser)
+}
+
+fn join_url<'a>(base: Option<&Url>, dest: CowStr<'a>) -> CowStr<'a> {
+ match base {
+ Some(base_url) => {
+ // Absolute URL or page-relative anchor doesn't need to be translated.
+ if dest.contains(':') || dest.starts_with('#') {
+ dest
+ } else {
+ let joined = base_url.join(&dest).unwrap_or_else(|e| {
+ panic!("failed to join URL `{}` to `{}`: {}", dest, base_url, e)
+ });
+ String::from(joined).into()
+ }
+ }
+ None => dest,
+ }
+}
+
+pub fn extract_section(file: &Path) -> Result<Section, Error> {
+ let f = fs::File::open(file).with_context(|| format!("could not open `{}`", file.display()))?;
+ let mut f = io::BufReader::new(f);
+ let mut line = String::new();
+ f.read_line(&mut line)?;
+ if !line.starts_with("# ") {
+ bail!("expected input file to start with # header");
+ }
+ let (_name, section) = util::parse_name_and_section(&line[2..].trim()).with_context(|| {
+ format!(
+ "expected input file to have header with the format `# command-name(1)`, found: `{}`",
+ line
+ )
+ })?;
+ Ok(section)
+}
diff --git a/crates/mdman/src/main.rs b/crates/mdman/src/main.rs
new file mode 100644
index 0000000..2bdf96d
--- /dev/null
+++ b/crates/mdman/src/main.rs
@@ -0,0 +1,133 @@
+use anyhow::{bail, format_err, Context, Error};
+use mdman::{Format, ManMap};
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
+use url::Url;
+
+/// Command-line options.
+struct Options {
+ format: Format,
+ output_dir: PathBuf,
+ sources: Vec<PathBuf>,
+ url: Option<Url>,
+ man_map: ManMap,
+}
+
+fn main() {
+ if let Err(e) = run() {
+ eprintln!("error: {}", e);
+ for cause in e.chain().skip(1) {
+ eprintln!("\nCaused by:");
+ for line in cause.to_string().lines() {
+ if line.is_empty() {
+ eprintln!();
+ } else {
+ eprintln!(" {}", line);
+ }
+ }
+ }
+ std::process::exit(1);
+ }
+}
+
+fn run() -> Result<(), Error> {
+ let opts = process_args()?;
+ if !opts.output_dir.exists() {
+ std::fs::create_dir_all(&opts.output_dir).with_context(|| {
+ format!(
+ "failed to create output directory {}",
+ opts.output_dir.display()
+ )
+ })?;
+ }
+ for source in &opts.sources {
+ let section = mdman::extract_section(source)?;
+ let filename =
+ Path::new(source.file_name().unwrap()).with_extension(opts.format.extension(section));
+ let out_path = opts.output_dir.join(filename);
+ if same_file::is_same_file(source, &out_path).unwrap_or(false) {
+ bail!("cannot output to the same file as the source");
+ }
+ println!("Converting {} -> {}", source.display(), out_path.display());
+ let result = mdman::convert(&source, opts.format, opts.url.clone(), opts.man_map.clone())
+ .with_context(|| format!("failed to translate {}", source.display()))?;
+
+ std::fs::write(out_path, result)?;
+ }
+ Ok(())
+}
+
+fn process_args() -> Result<Options, Error> {
+ let mut format = None;
+ let mut output = None;
+ let mut url = None;
+ let mut man_map: ManMap = HashMap::new();
+ let mut sources = Vec::new();
+ let mut args = std::env::args().skip(1);
+ while let Some(arg) = args.next() {
+ match arg.as_str() {
+ "-t" => {
+ format = match args.next().as_deref() {
+ Some("man") => Some(Format::Man),
+ Some("md") => Some(Format::Md),
+ Some("txt") => Some(Format::Text),
+ Some(s) => bail!("unknown output format: {}", s),
+ None => bail!("-t requires a value (man, md, txt)"),
+ };
+ }
+ "-o" => {
+ output = match args.next() {
+ Some(s) => Some(PathBuf::from(s)),
+ None => bail!("-o requires a value"),
+ };
+ }
+ "--url" => {
+ url = match args.next() {
+ Some(s) => {
+ let url = Url::parse(&s)
+ .with_context(|| format!("could not convert `{}` to a url", s))?;
+ if !url.path().ends_with('/') {
+ bail!("url `{}` should end with a /", url);
+ }
+ Some(url)
+ }
+ None => bail!("--url requires a value"),
+ }
+ }
+ "--man" => {
+ let man = args
+ .next()
+ .ok_or_else(|| format_err!("--man requires a value"))?;
+ let parts: Vec<_> = man.splitn(2, '=').collect();
+ let key_parts: Vec<_> = parts[0].splitn(2, ':').collect();
+ if parts.len() != 2 || key_parts.len() != 2 {
+ bail!("--man expected value with form name:1=link");
+ }
+ let section: u8 = key_parts[1].parse().with_context(|| {
+                        format!("expected unsigned integer for section, got `{}`", key_parts[1])
+ })?;
+ man_map.insert((key_parts[0].to_string(), section), parts[1].to_string());
+ }
+ s => {
+ sources.push(PathBuf::from(s));
+ }
+ }
+ }
+ if format.is_none() {
+ bail!("-t must be specified (man, md, txt)");
+ }
+ if output.is_none() {
+ bail!("-o must be specified (output directory)");
+ }
+ if sources.is_empty() {
+ bail!("at least one source must be specified");
+ }
+ let opts = Options {
+ format: format.unwrap(),
+ output_dir: output.unwrap(),
+ sources,
+ url,
+ man_map,
+ };
+ Ok(opts)
+}
diff --git a/crates/mdman/src/util.rs b/crates/mdman/src/util.rs
new file mode 100644
index 0000000..a4c71ad
--- /dev/null
+++ b/crates/mdman/src/util.rs
@@ -0,0 +1,44 @@
+//! General utilities.
+use crate::EventIter;
+use anyhow::{bail, format_err, Context, Error};
+use pulldown_cmark::{CowStr, Event, Tag};
+
+/// Splits the text `foo(1)` into "foo" and `1`.
+pub fn parse_name_and_section(text: &str) -> Result<(&str, u8), Error> {
+ let mut i = text.split_terminator(&['(', ')'][..]);
+ let name = i
+ .next()
+ .ok_or_else(|| format_err!("man reference must have a name"))?;
+ let section = i
+ .next()
+ .ok_or_else(|| format_err!("man reference must have a section such as mycommand(1)"))?;
+ if let Some(s) = i.next() {
+ bail!(
+ "man reference must have the form mycommand(1), got extra part `{}`",
+ s
+ );
+ }
+ let section: u8 = section
+ .parse()
+ .with_context(|| format!("section must be a number, got {}", section))?;
+ Ok((name, section))
+}
+
+/// Extracts the text from a header after Tag::Heading has been received.
+pub fn header_text<'e>(parser: &mut EventIter<'e>) -> Result<CowStr<'e>, Error> {
+ let text = match parser.next() {
+ Some((Event::Text(t), _range)) => t,
+ e => bail!("expected plain text in man header, got {:?}", e),
+ };
+ match parser.next() {
+ Some((Event::End(Tag::Heading(..)), _range)) => {
+ return Ok(text);
+ }
+ e => bail!("expected plain text in man header, got {:?}", e),
+ }
+}
+
+/// Removes tags from the front and back of a string.
+pub fn unwrap<'t>(text: &'t str, front: &str, back: &str) -> &'t str {
+ text.trim().trim_start_matches(front).trim_end_matches(back)
+}
diff --git a/crates/mdman/tests/compare.rs b/crates/mdman/tests/compare.rs
new file mode 100644
index 0000000..3e679d1
--- /dev/null
+++ b/crates/mdman/tests/compare.rs
@@ -0,0 +1,48 @@
+//! Compares input to expected output.
+//!
+//! Use the MDMAN_BLESS environment variable to automatically update the
+//! expected output.
+
+use mdman::{Format, ManMap};
+use pretty_assertions::assert_eq;
+use std::path::PathBuf;
+use url::Url;
+
+fn run(name: &str) {
+ let input = PathBuf::from(format!("tests/compare/{}.md", name));
+ let url = Some(Url::parse("https://example.org/").unwrap());
+ let mut map = ManMap::new();
+ map.insert(
+ ("other-cmd".to_string(), 1),
+ "https://example.org/commands/other-cmd.html".to_string(),
+ );
+
+ for &format in &[Format::Man, Format::Md, Format::Text] {
+ let section = mdman::extract_section(&input).unwrap();
+ let result = mdman::convert(&input, format, url.clone(), map.clone()).unwrap();
+ let expected_path = format!(
+ "tests/compare/expected/{}.{}",
+ name,
+ format.extension(section)
+ );
+ if std::env::var("MDMAN_BLESS").is_ok() {
+ std::fs::write(&expected_path, result).unwrap();
+ } else {
+ let expected = std::fs::read_to_string(&expected_path).unwrap();
+ // Fix if Windows checked out with autocrlf.
+ let expected = expected.replace("\r\n", "\n");
+ assert_eq!(expected, result);
+ }
+ }
+}
+
+macro_rules! test( ($name:ident) => (
+ #[test]
+ fn $name() { run(stringify!($name)); }
+) );
+
+test!(formatting);
+test!(links);
+test!(options);
+test!(tables);
+test!(vars);
diff --git a/crates/mdman/tests/compare/expected/formatting.1 b/crates/mdman/tests/compare/expected/formatting.1
new file mode 100644
index 0000000..840734c
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/formatting.1
@@ -0,0 +1,118 @@
+'\" t
+.TH "FORMATTING" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.sp
+This is \fBnested \f(BIformatting\fB \fBtext\fB\fR\&.
+.SH "SECOND HEADING"
+Some text at second level.
+.SS "Third heading"
+Some text at third level.
+.SS "Fourth heading"
+Some text at fourth level.
+.SH "Quotes and blocks."
+Here are some quotes and blocks.
+.RS 3
+.ll -5
+.sp
+This is a block quote. Ambidextrously koala apart that prudent blindly alas
+far amid dear goodness turgid so exact inside oh and alas much fanciful that
+dark on spoon\-fed adequately insolent walking crud.
+.br
+.RE
+.ll
+.sp
+.RS 4
+.nf
+This is a code block. Groundhog watchfully sudden firefly some self\-consciously hotly jeepers satanic after that this parrot this at virtuous
+some mocking the leaned jeez nightingale as much mallard so because jeez
+turned dear crud grizzly strenuously.
+
+ Indented and should be unmodified.
+.fi
+.RE
+.sp
+.RS 4
+.nf
+This is an indented code block. Egregiously yikes animatedly since outside beseechingly a badger hey shakily giraffe a one wow one this
+goodness regarding reindeer so astride before.
+
+ Doubly indented
+.fi
+.RE
+.SH "Lists"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Ordered list
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Unordered list
+.sp
+With a second paragraph inside it
+.sp
+.RS 4
+\h'-04' 1.\h'+01'Inner ordered list
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Another
+.RE
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Eggs
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Milk
+.sp
+.RS 4
+\h'-04' 5.\h'+01'Don\[cq]t start at one.
+.RE
+.sp
+.RS 4
+\h'-04' 6.\h'+01'tamarind
+.RE
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Second element
+.RE
+.sp
+.RS 4
+\h'-04' 3.\h'+01'Third element
+.RE
+.SH "Breaks"
+This has a
+.br
+hard break in it
+and a soft one.
+.SH "Horizontal rule"
+This should contain a line:
+\l'\n(.lu'
+.sp
+Nice!
+.SH "Strange characters"
+Handles escaping for characters
+.sp
+\&.dot at the start of a line.
+.sp
+\(rsfBnot really troff
+.sp
+Various characters \(rs \- \[en] \[em] \- | | `
+.sp
+.RS 4
+.nf
+tree
+`\-\- example
+ |\-\- salamander
+ | |\-\- honey
+ | `\-\- some
+ |\-\- fancifully
+ `\-\- trout
+.fi
+.RE
+.sp
+\ \ \ \ non\-breaking space.
diff --git a/crates/mdman/tests/compare/expected/formatting.md b/crates/mdman/tests/compare/expected/formatting.md
new file mode 100644
index 0000000..3b9f5b8
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/formatting.md
@@ -0,0 +1,95 @@
+# formatting(1)
+
+This is **nested _formatting_ `text`**.
+
+## SECOND HEADING
+
+Some text at second level.
+
+### Third heading
+
+Some text at third level.
+
+#### Fourth heading
+
+Some text at fourth level.
+
+## Quotes and blocks.
+
+Here are some quotes and blocks.
+
+> This is a block quote. Ambidextrously koala apart that prudent blindly alas
+> far amid dear goodness turgid so exact inside oh and alas much fanciful that
+> dark on spoon-fed adequately insolent walking crud.
+
+```
+This is a code block. Groundhog watchfully sudden firefly some self-consciously hotly jeepers satanic after that this parrot this at virtuous
+some mocking the leaned jeez nightingale as much mallard so because jeez
+turned dear crud grizzly strenuously.
+
+ Indented and should be unmodified.
+```
+
+ This is an indented code block. Egregiously yikes animatedly since outside beseechingly a badger hey shakily giraffe a one wow one this
+ goodness regarding reindeer so astride before.
+
+ Doubly indented
+
+## Lists
+
+1. Ordered list
+
+ * Unordered list
+
+ With a second paragraph inside it
+
+ 1. Inner ordered list
+
+ 1. Another
+
+ * Eggs
+
+ * Milk
+
+ 5. Don't start at one.
+ 6. tamarind
+
+1. Second element
+
+1. Third element
+
+## Breaks
+
+This has a\
+hard break in it
+and a soft one.
+
+## Horizontal rule
+
+This should contain a line:
+
+---
+
+Nice!
+
+## Strange characters
+
+Handles escaping for characters
+
+.dot at the start of a line.
+
+\fBnot really troff
+
+Various characters \ - – — ─ │ ├ └
+
+```
+tree
+└── example
+ ├── salamander
+ │ ├── honey
+ │ └── some
+ ├── fancifully
+ └── trout
+```
+
+&nbsp;&nbsp;&nbsp;&nbsp;non-breaking space.
diff --git a/crates/mdman/tests/compare/expected/formatting.txt b/crates/mdman/tests/compare/expected/formatting.txt
new file mode 100644
index 0000000..b5258c4
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/formatting.txt
@@ -0,0 +1,84 @@
+FORMATTING(1)
+
+This is nested formatting text.
+
+SECOND HEADING
+ Some text at second level.
+
+ Third heading
+ Some text at third level.
+
+ Fourth heading
+ Some text at fourth level.
+
+QUOTES AND BLOCKS.
+ Here are some quotes and blocks.
+
+ This is a block quote. Ambidextrously koala apart that prudent
+ blindly alas far amid dear goodness turgid so exact inside oh and
+ alas much fanciful that dark on spoon-fed adequately insolent walking
+ crud.
+
+ This is a code block. Groundhog watchfully sudden firefly some self-consciously hotly jeepers satanic after that this parrot this at virtuous
+ some mocking the leaned jeez nightingale as much mallard so because jeez
+ turned dear crud grizzly strenuously.
+
+ Indented and should be unmodified.
+
+ This is an indented code block. Egregiously yikes animatedly since outside beseechingly a badger hey shakily giraffe a one wow one this
+ goodness regarding reindeer so astride before.
+
+ Doubly indented
+
+LISTS
+ 1. Ordered list
+
+ o Unordered list
+
+ With a second paragraph inside it
+
+ 1. Inner ordered list
+
+ 2. Another
+
+ o Eggs
+
+ o Milk
+
+ 5. Don’t start at one.
+
+ 6. tamarind
+
+ 2. Second element
+
+ 3. Third element
+
+BREAKS
+ This has a
+ hard break in it and a soft one.
+
+HORIZONTAL RULE
+ This should contain a line:
+
+ _________________________________________________________________
+ Nice!
+
+STRANGE CHARACTERS
+ Handles escaping for characters
+
+ .dot at the start of a line.
+
+ \fBnot really troff
+
+ Various characters \ - – — ─ │ ├ └
+
+ tree
+ └── example
+ ├── salamander
+ │ ├── honey
+ │ └── some
+ ├── fancifully
+ └── trout
+
+     non-breaking space.
+
diff --git a/crates/mdman/tests/compare/expected/links.1 b/crates/mdman/tests/compare/expected/links.1
new file mode 100644
index 0000000..e56cef7
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/links.1
@@ -0,0 +1,45 @@
+'\" t
+.TH "LINKS" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+links \- Test of different link kinds
+.SH "DESCRIPTION"
+Inline link: \fIinline link\fR <https://example.com/inline>
+.sp
+Reference link: \fIthis is a link\fR <https://example.com/bar>
+.sp
+Collapsed: \fIcollapsed\fR <https://example.com/collapsed>
+.sp
+Shortcut: \fIshortcut\fR <https://example.com/shortcut>
+.sp
+Autolink: <https://example.com/auto>
+.sp
+Email: <foo@example.com>
+.sp
+Relative link: \fIrelative link\fR <https://example.org/foo/bar.html>
+.sp
+Collapsed unknown: [collapsed unknown][]
+.sp
+Reference unknown: [foo][unknown]
+.sp
+Shortcut unknown: [shortcut unknown]
+.sp
+\fBother\-cmd\fR(1)
+.sp
+\fBlocal\-cmd\fR(1)
+.sp
+\fISome link\fR <https://example.org/foo.html>
+.sp
+\fB\-\-include\fR
+.RS 4
+Testing an \fIincluded link\fR <https://example.org/included_link.html>\&.
+.RE
+.SH "OPTIONS"
+.sp
+\fB\-\-foo\-bar\fR
+.RS 4
+Example \fIlink\fR <https://example.org/bar.html>\&.
+See \fBother\-cmd\fR(1), \fBlocal\-cmd\fR(1)
+.RE
diff --git a/crates/mdman/tests/compare/expected/links.md b/crates/mdman/tests/compare/expected/links.md
new file mode 100644
index 0000000..11afcf3
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/links.md
@@ -0,0 +1,56 @@
+# links(1)
+
+## NAME
+
+links - Test of different link kinds
+
+## DESCRIPTION
+
+Inline link: [inline link](https://example.com/inline)
+
+Reference link: [this is a link][bar]
+
+Collapsed: [collapsed][]
+
+Shortcut: [shortcut]
+
+Autolink: <https://example.com/auto>
+
+Email: <foo@example.com>
+
+Relative link: [relative link](foo/bar.html)
+
+Collapsed unknown: [collapsed unknown][]
+
+Reference unknown: [foo][unknown]
+
+Shortcut unknown: [shortcut unknown]
+
+[other-cmd(1)](https://example.org/commands/other-cmd.html)
+
+[local-cmd(1)](local-cmd.html)
+
+[Some link](foo.html)
+
+<dl>
+<dt class="option-term" id="option-links---include"><a class="option-anchor" href="#option-links---include"></a><code>--include</code></dt>
+<dd class="option-desc">Testing an <a href="included_link.html">included link</a>.</dd>
+
+</dl>
+
+
+## OPTIONS
+
+<dl>
+
+<dt class="option-term" id="option-links---foo-bar"><a class="option-anchor" href="#option-links---foo-bar"></a><code>--foo-bar</code></dt>
+<dd class="option-desc">Example <a href="bar.html">link</a>.
+See <a href="https://example.org/commands/other-cmd.html">other-cmd(1)</a>, <a href="local-cmd.html">local-cmd(1)</a></dd>
+
+
+</dl>
+
+
+[bar]: https://example.com/bar
+[collapsed]: https://example.com/collapsed
+[shortcut]: https://example.com/shortcut
diff --git a/crates/mdman/tests/compare/expected/links.txt b/crates/mdman/tests/compare/expected/links.txt
new file mode 100644
index 0000000..7748c3d
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/links.txt
@@ -0,0 +1,40 @@
+LINKS(1)
+
+NAME
+ links - Test of different link kinds
+
+DESCRIPTION
+ Inline link: inline link <https://example.com/inline>
+
+ Reference link: this is a link <https://example.com/bar>
+
+ Collapsed: collapsed <https://example.com/collapsed>
+
+ Shortcut: shortcut <https://example.com/shortcut>
+
+ Autolink: <https://example.com/auto>
+
+ Email: <foo@example.com>
+
+ Relative link: relative link <https://example.org/foo/bar.html>
+
+ Collapsed unknown: [collapsed unknown][]
+
+ Reference unknown: [foo][unknown]
+
+ Shortcut unknown: [shortcut unknown]
+
+ other-cmd(1)
+
+ local-cmd(1)
+
+ Some link <https://example.org/foo.html>
+
+ --include
+ Testing an included link <https://example.org/included_link.html>.
+
+OPTIONS
+ --foo-bar
+ Example link <https://example.org/bar.html>. See other-cmd(1),
+ local-cmd(1)
+
diff --git a/crates/mdman/tests/compare/expected/options.1 b/crates/mdman/tests/compare/expected/options.1
new file mode 100644
index 0000000..d362421
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/options.1
@@ -0,0 +1,94 @@
+'\" t
+.TH "MY\-COMMAND" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "NAME"
+my\-command \- A brief description
+.SH "SYNOPSIS"
+\fBmy\-command\fR [\fB\-\-abc\fR | \fB\-\-xyz\fR] \fIname\fR
+.br
+\fBmy\-command\fR [\fB\-f\fR \fIfile\fR]
+.br
+\fBmy\-command\fR (\fB\-m\fR | \fB\-M\fR) [\fIoldbranch\fR] \fInewbranch\fR
+.br
+\fBmy\-command\fR (\fB\-d\fR | \fB\-D\fR) [\fB\-r\fR] \fIbranchname\fR\[u2026]
+.SH "DESCRIPTION"
+A description of the command.
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'One
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Sub one
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Sub two
+.RE
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Two
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'Three
+.RE
+.SH "OPTIONS"
+.SS "Command options"
+.sp
+\fB\-\-foo\-bar\fR
+.RS 4
+Demo \fIemphasis\fR, \fBstrong\fR, ~~strike~~
+.RE
+.sp
+\fB\-p\fR \fIspec\fR,
+\fB\-\-package\fR \fIspec\fR
+.RS 4
+This has multiple flags.
+.RE
+.sp
+\fInamed\-arg\[u2026]\fR
+.RS 4
+A named argument.
+.RE
+.SS "Common Options"
+.sp
+\fB@\fR\fIfilename\fR
+.RS 4
+Load from filename.
+.RE
+.sp
+\fB\-\-foo\fR [\fIbar\fR]
+.RS 4
+Flag with optional value.
+.RE
+.sp
+\fB\-\-foo\fR[\fB=\fR\fIbar\fR]
+.RS 4
+Alternate syntax for optional value (with required = for disambiguation).
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+\h'-04' 1.\h'+01'An example
+.sp
+.RS 4
+.nf
+my\-command \-\-abc
+.fi
+.RE
+.RE
+.sp
+.RS 4
+\h'-04' 2.\h'+01'Another example
+.sp
+.RS 4
+.nf
+my\-command \-\-xyz
+.fi
+.RE
+.RE
+.SH "SEE ALSO"
+\fBother\-command\fR(1) \fBabc\fR(7)
diff --git a/crates/mdman/tests/compare/expected/options.md b/crates/mdman/tests/compare/expected/options.md
new file mode 100644
index 0000000..19b0b44
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/options.md
@@ -0,0 +1,77 @@
+# my-command(1)
+
+## NAME
+
+my-command - A brief description
+
+## SYNOPSIS
+
+`my-command` [`--abc` | `--xyz`] _name_\
+`my-command` [`-f` _file_]\
+`my-command` (`-m` | `-M`) [_oldbranch_] _newbranch_\
+`my-command` (`-d` | `-D`) [`-r`] _branchname_...
+
+## DESCRIPTION
+
+A description of the command.
+
+* One
+ * Sub one
+ * Sub two
+* Two
+* Three
+
+
+## OPTIONS
+
+### Command options
+
+<dl>
+
+<dt class="option-term" id="option-options---foo-bar"><a class="option-anchor" href="#option-options---foo-bar"></a><code>--foo-bar</code></dt>
+<dd class="option-desc">Demo <em>emphasis</em>, <strong>strong</strong>, <del>strike</del></dd>
+
+
+<dt class="option-term" id="option-options--p"><a class="option-anchor" href="#option-options--p"></a><code>-p</code> <em>spec</em></dt>
+<dt class="option-term" id="option-options---package"><a class="option-anchor" href="#option-options---package"></a><code>--package</code> <em>spec</em></dt>
+<dd class="option-desc">This has multiple flags.</dd>
+
+
+<dt class="option-term" id="option-options-named-arg…"><a class="option-anchor" href="#option-options-named-arg…"></a><em>named-arg…</em></dt>
+<dd class="option-desc">A named argument.</dd>
+
+
+</dl>
+
+### Common Options
+
+<dl>
+<dt class="option-term" id="option-options-@filename"><a class="option-anchor" href="#option-options-@filename"></a><code>@</code><em>filename</em></dt>
+<dd class="option-desc">Load from filename.</dd>
+
+
+<dt class="option-term" id="option-options---foo"><a class="option-anchor" href="#option-options---foo"></a><code>--foo</code> [<em>bar</em>]</dt>
+<dd class="option-desc">Flag with optional value.</dd>
+
+
+<dt class="option-term" id="option-options---foo[=bar]"><a class="option-anchor" href="#option-options---foo[=bar]"></a><code>--foo</code>[<code>=</code><em>bar</em>]</dt>
+<dd class="option-desc">Alternate syntax for optional value (with required = for disambiguation).</dd>
+
+
+</dl>
+
+
+## EXAMPLES
+
+1. An example
+
+ ```
+ my-command --abc
+ ```
+
+1. Another example
+
+ my-command --xyz
+
+## SEE ALSO
+[other-command(1)](other-command.html) [abc(7)](abc.html)
diff --git a/crates/mdman/tests/compare/expected/options.txt b/crates/mdman/tests/compare/expected/options.txt
new file mode 100644
index 0000000..9bfdec6
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/options.txt
@@ -0,0 +1,57 @@
+MY-COMMAND(1)
+
+NAME
+ my-command - A brief description
+
+SYNOPSIS
+ my-command [--abc | --xyz] name
+ my-command [-f file]
+ my-command (-m | -M) [oldbranch] newbranch
+ my-command (-d | -D) [-r] branchname…
+
+DESCRIPTION
+ A description of the command.
+
+ o One
+ o Sub one
+
+ o Sub two
+
+ o Two
+
+ o Three
+
+OPTIONS
+ Command options
+ --foo-bar
+ Demo emphasis, strong, ~~strike~~
+
+ -p spec, --package spec
+ This has multiple flags.
+
+ named-arg…
+ A named argument.
+
+ Common Options
+ @filename
+ Load from filename.
+
+ --foo [bar]
+ Flag with optional value.
+
+ --foo[=bar]
+ Alternate syntax for optional value (with required = for
+ disambiguation).
+
+EXAMPLES
+ 1. An example
+
+ my-command --abc
+
+ 2. Another example
+
+ my-command --xyz
+
+SEE ALSO
+ other-command(1) abc(7)
+
diff --git a/crates/mdman/tests/compare/expected/tables.1 b/crates/mdman/tests/compare/expected/tables.1
new file mode 100644
index 0000000..7175a3e
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/tables.1
@@ -0,0 +1,108 @@
+'\" t
+.TH "TABLES" "1"
+.nh
+.ad l
+.ss \n[.ss] 0
+.SH "DESCRIPTION"
+Testing tables.
+
+.TS
+allbox tab(:);
+lt.
+T{
+Single col
+T}
+T{
+Hi! :)
+T}
+.TE
+.sp
+
+.TS
+allbox tab(:);
+lt lt lt.
+T{
+Header content
+T}:T{
+With \fBformat\fR \fItext\fR
+T}:T{
+Another column
+T}
+T{
+Some data
+T}:T{
+More data
+T}:T{
+
+T}
+T{
+Extra long amount of text within a column
+T}:T{
+hi
+T}:T{
+there
+T}
+.TE
+.sp
+
+.TS
+allbox tab(:);
+lt ct rt.
+T{
+Left aligned
+T}:T{
+Center aligned
+T}:T{
+Right aligned
+T}
+T{
+abc
+T}:T{
+def
+T}:T{
+ghi
+T}
+.TE
+.sp
+
+.TS
+allbox tab(:);
+lt ct rt.
+T{
+Left aligned
+T}:T{
+Center aligned
+T}:T{
+Right aligned
+T}
+T{
+X
+T}:T{
+X
+T}:T{
+X
+T}
+T{
+Extra long text 123456789012 with mixed widths.
+T}:T{
+Extra long text 123456789012 with mixed widths.
+T}:T{
+Extra long text 123456789012 with mixed widths.
+T}
+.TE
+.sp
+
+.TS
+allbox tab(:);
+lt.
+T{
+Link check
+T}
+T{
+\fIfoo\fR <https://example.com/>
+T}
+T{
+<https://example.com/>
+T}
+.TE
+.sp
diff --git a/crates/mdman/tests/compare/expected/tables.md b/crates/mdman/tests/compare/expected/tables.md
new file mode 100644
index 0000000..831132c
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/tables.md
@@ -0,0 +1,35 @@
+# tables(1)
+
+## DESCRIPTION
+
+Testing tables.
+
+| Single col |
+--------------
+| Hi! :) |
+
+
+Header content | With `format` *text* | Another column
+---------------|----------------------|----------------
+Some data | More data |
+Extra long amount of text within a column | hi | there
+
+
+Left aligned | Center aligned | Right aligned
+-------------|:--------------:|--------------:
+abc | def | ghi
+
+
+Left aligned | Center aligned | Right aligned
+-------------|:--------------:|--------------:
+X | X | X
+Extra long text 123456789012 with mixed widths. | Extra long text 123456789012 with mixed widths. | Extra long text 123456789012 with mixed widths.
+
+
+| Link check |
+--------------
+| [foo] |
+| <https://example.com/> |
+
+
+[foo]: https://example.com/
diff --git a/crates/mdman/tests/compare/expected/tables.txt b/crates/mdman/tests/compare/expected/tables.txt
new file mode 100644
index 0000000..fed53f9
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/tables.txt
@@ -0,0 +1,45 @@
+TABLES(1)
+
+DESCRIPTION
+ Testing tables.
+
+ +-------------+
+ | Single col |
+ +-------------+
+ | Hi! :) |
+ +-------------+
+
+ +-------------------------------------+----------------+--------------+
+ | Header content | With format | Another |
+ | | text | column |
+ +-------------------------------------+----------------+--------------+
+ | Some data | More data | |
+ +-------------------------------------+----------------+--------------+
+ | Extra long amount of text within a | hi | there |
+ | column | | |
+ +-------------------------------------+----------------+--------------+
+
+ +---------------+-----------------+----------------+
+ | Left aligned | Center aligned | Right aligned |
+ +---------------+-----------------+----------------+
+ | abc | def | ghi |
+ +---------------+-----------------+----------------+
+
+ +-----------------------+-----------------------+-----------------------+
+ | Left aligned | Center aligned | Right aligned |
+ +-----------------------+-----------------------+-----------------------+
+ | X | X | X |
+ +-----------------------+-----------------------+-----------------------+
+ | Extra long text | Extra long text | Extra long text |
+ | 123456789012 with | 123456789012 with | 123456789012 with |
+ | mixed widths. | mixed widths. | mixed widths. |
+ +-----------------------+-----------------------+-----------------------+
+
+ +-----------------------+
+ | Link check |
+ +-----------------------+
+ | foo |
+ +-----------------------+
+ | https://example.com/ |
+ +-----------------------+
+
diff --git a/crates/mdman/tests/compare/expected/vars.7 b/crates/mdman/tests/compare/expected/vars.7
new file mode 100644
index 0000000..0ee33ad
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/vars.7
@@ -0,0 +1,9 @@
+'\" t
+.TH "VARS" "7"
+.nh
+.ad l
+.ss \n[.ss] 0
+.sp
+Bar
+.sp
+bar
diff --git a/crates/mdman/tests/compare/expected/vars.md b/crates/mdman/tests/compare/expected/vars.md
new file mode 100644
index 0000000..2493aca
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/vars.md
@@ -0,0 +1,7 @@
+# vars(7)
+
+
+
+Bar
+
+bar
diff --git a/crates/mdman/tests/compare/expected/vars.txt b/crates/mdman/tests/compare/expected/vars.txt
new file mode 100644
index 0000000..11d34ca
--- /dev/null
+++ b/crates/mdman/tests/compare/expected/vars.txt
@@ -0,0 +1,6 @@
+VARS(7)
+
+Bar
+
+bar
+
diff --git a/crates/mdman/tests/compare/formatting.md b/crates/mdman/tests/compare/formatting.md
new file mode 100644
index 0000000..3b9f5b8
--- /dev/null
+++ b/crates/mdman/tests/compare/formatting.md
@@ -0,0 +1,95 @@
+# formatting(1)
+
+This is **nested _formatting_ `text`**.
+
+## SECOND HEADING
+
+Some text at second level.
+
+### Third heading
+
+Some text at third level.
+
+#### Fourth heading
+
+Some text at fourth level.
+
+## Quotes and blocks.
+
+Here are some quotes and blocks.
+
+> This is a block quote. Ambidextrously koala apart that prudent blindly alas
+> far amid dear goodness turgid so exact inside oh and alas much fanciful that
+> dark on spoon-fed adequately insolent walking crud.
+
+```
+This is a code block. Groundhog watchfully sudden firefly some self-consciously hotly jeepers satanic after that this parrot this at virtuous
+some mocking the leaned jeez nightingale as much mallard so because jeez
+turned dear crud grizzly strenuously.
+
+ Indented and should be unmodified.
+```
+
+ This is an indented code block. Egregiously yikes animatedly since outside beseechingly a badger hey shakily giraffe a one wow one this
+ goodness regarding reindeer so astride before.
+
+ Doubly indented
+
+## Lists
+
+1. Ordered list
+
+ * Unordered list
+
+ With a second paragraph inside it
+
+ 1. Inner ordered list
+
+ 1. Another
+
+ * Eggs
+
+ * Milk
+
+ 5. Don't start at one.
+ 6. tamarind
+
+1. Second element
+
+1. Third element
+
+## Breaks
+
+This has a\
+hard break in it
+and a soft one.
+
+## Horizontal rule
+
+This should contain a line:
+
+---
+
+Nice!
+
+## Strange characters
+
+Handles escaping for characters
+
+.dot at the start of a line.
+
+\fBnot really troff
+
+Various characters \ - – — ─ │ ├ └
+
+```
+tree
+└── example
+ ├── salamander
+ │ ├── honey
+ │ └── some
+ ├── fancifully
+ └── trout
+```
+
+&nbsp;&nbsp;&nbsp;&nbsp;non-breaking space.
diff --git a/crates/mdman/tests/compare/includes/links-include.md b/crates/mdman/tests/compare/includes/links-include.md
new file mode 100644
index 0000000..7373360
--- /dev/null
+++ b/crates/mdman/tests/compare/includes/links-include.md
@@ -0,0 +1,7 @@
+[Some link](foo.html)
+
+{{#options}}
+{{#option "`--include`"}}
+Testing an [included link](included_link.html).
+{{/option}}
+{{/options}}
diff --git a/crates/mdman/tests/compare/includes/options-common.md b/crates/mdman/tests/compare/includes/options-common.md
new file mode 100644
index 0000000..07404e3
--- /dev/null
+++ b/crates/mdman/tests/compare/includes/options-common.md
@@ -0,0 +1,14 @@
+{{#options}}
+{{#option "`@`_filename_"}}
+Load from filename.
+{{/option}}
+
+{{#option "`--foo` [_bar_]"}}
+Flag with optional value.
+{{/option}}
+
+{{#option "`--foo`[`=`_bar_]"}}
+Alternate syntax for optional value (with required = for disambiguation).
+{{/option}}
+
+{{/options}}
diff --git a/crates/mdman/tests/compare/links.md b/crates/mdman/tests/compare/links.md
new file mode 100644
index 0000000..949f374
--- /dev/null
+++ b/crates/mdman/tests/compare/links.md
@@ -0,0 +1,49 @@
+# links(1)
+
+## NAME
+
+links - Test of different link kinds
+
+## DESCRIPTION
+
+Inline link: [inline link](https://example.com/inline)
+
+Reference link: [this is a link][bar]
+
+Collapsed: [collapsed][]
+
+Shortcut: [shortcut]
+
+Autolink: <https://example.com/auto>
+
+Email: <foo@example.com>
+
+Relative link: [relative link](foo/bar.html)
+
+Collapsed unknown: [collapsed unknown][]
+
+Reference unknown: [foo][unknown]
+
+Shortcut unknown: [shortcut unknown]
+
+{{man "other-cmd" 1}}
+
+{{man "local-cmd" 1}}
+
+{{> links-include}}
+
+## OPTIONS
+
+{{#options}}
+
+{{#option "`--foo-bar`"}}
+Example [link](bar.html).
+See {{man "other-cmd" 1}}, {{man "local-cmd" 1}}
+{{/option}}
+
+{{/options}}
+
+
+[bar]: https://example.com/bar
+[collapsed]: https://example.com/collapsed
+[shortcut]: https://example.com/shortcut
diff --git a/crates/mdman/tests/compare/options.md b/crates/mdman/tests/compare/options.md
new file mode 100644
index 0000000..51415b0
--- /dev/null
+++ b/crates/mdman/tests/compare/options.md
@@ -0,0 +1,62 @@
+# my-command(1)
+
+## NAME
+
+my-command - A brief description
+
+## SYNOPSIS
+
+`my-command` [`--abc` | `--xyz`] _name_\
+`my-command` [`-f` _file_]\
+`my-command` (`-m` | `-M`) [_oldbranch_] _newbranch_\
+`my-command` (`-d` | `-D`) [`-r`] _branchname_...
+
+## DESCRIPTION
+
+A description of the command.
+
+* One
+ * Sub one
+ * Sub two
+* Two
+* Three
+
+
+## OPTIONS
+
+### Command options
+
+{{#options}}
+
+{{#option "`--foo-bar`"}}
+Demo *emphasis*, **strong**, ~~strike~~
+{{/option}}
+
+{{#option "`-p` _spec_" "`--package` _spec_"}}
+This has multiple flags.
+{{/option}}
+
+{{#option "_named-arg..._"}}
+A named argument.
+{{/option}}
+
+{{/options}}
+
+### Common Options
+
+{{> options-common}}
+
+## EXAMPLES
+
+1. An example
+
+ ```
+ my-command --abc
+ ```
+
+1. Another example
+
+ my-command --xyz
+
+## SEE ALSO
+{{man "other-command" 1}} {{man "abc" 7}}
diff --git a/crates/mdman/tests/compare/tables.md b/crates/mdman/tests/compare/tables.md
new file mode 100644
index 0000000..831132c
--- /dev/null
+++ b/crates/mdman/tests/compare/tables.md
@@ -0,0 +1,35 @@
+# tables(1)
+
+## DESCRIPTION
+
+Testing tables.
+
+| Single col |
+--------------
+| Hi! :) |
+
+
+Header content | With `format` *text* | Another column
+---------------|----------------------|----------------
+Some data | More data |
+Extra long amount of text within a column | hi | there
+
+
+Left aligned | Center aligned | Right aligned
+-------------|:--------------:|--------------:
+abc | def | ghi
+
+
+Left aligned | Center aligned | Right aligned
+-------------|:--------------:|--------------:
+X | X | X
+Extra long text 123456789012 with mixed widths. | Extra long text 123456789012 with mixed widths. | Extra long text 123456789012 with mixed widths.
+
+
+| Link check |
+--------------
+| [foo] |
+| <https://example.com/> |
+
+
+[foo]: https://example.com/
diff --git a/crates/mdman/tests/compare/vars.md b/crates/mdman/tests/compare/vars.md
new file mode 100644
index 0000000..d41b765
--- /dev/null
+++ b/crates/mdman/tests/compare/vars.md
@@ -0,0 +1,7 @@
+# vars(7)
+
+{{*set foo="Bar"}}
+
+{{foo}}
+
+{{lower foo}}
diff --git a/crates/mdman/tests/invalid.rs b/crates/mdman/tests/invalid.rs
new file mode 100644
index 0000000..cc81d06
--- /dev/null
+++ b/crates/mdman/tests/invalid.rs
@@ -0,0 +1,34 @@
+//! Tests for errors and invalid input.
+
+use mdman::{Format, ManMap};
+use pretty_assertions::assert_eq;
+use std::path::PathBuf;
+
+fn run(name: &str, expected_error: &str) {
+ let input = PathBuf::from(format!("tests/invalid/{}", name));
+ match mdman::convert(&input, Format::Man, None, ManMap::new()) {
+ Ok(_) => {
+ panic!("expected {} to fail", name);
+ }
+ Err(e) => {
+ assert_eq!(expected_error, e.to_string());
+ }
+ }
+}
+
+macro_rules! test( ($name:ident, $file_name:expr, $error:expr) => (
+ #[test]
+ fn $name() { run($file_name, $error); }
+) );
+
+test!(
+ nested,
+ "nested.md",
+ "Error rendering \"template\" line 4, col 1: options blocks cannot be nested"
+);
+
+test!(
+ not_inside_options,
+ "not-inside-options.md",
+ "Error rendering \"template\" line 3, col 1: option must be in options block"
+);
diff --git a/crates/mdman/tests/invalid/nested.md b/crates/mdman/tests/invalid/nested.md
new file mode 100644
index 0000000..6a33e6d
--- /dev/null
+++ b/crates/mdman/tests/invalid/nested.md
@@ -0,0 +1,6 @@
+# nested(1)
+
+{{#options}}
+{{#options}}
+{{/options}}
+{{/options}}
diff --git a/crates/mdman/tests/invalid/not-inside-options.md b/crates/mdman/tests/invalid/not-inside-options.md
new file mode 100644
index 0000000..b6c816f
--- /dev/null
+++ b/crates/mdman/tests/invalid/not-inside-options.md
@@ -0,0 +1,5 @@
+# not-inside-options(1)
+
+{{#option "`-o`"}}
+Testing without options block.
+{{/option}}
diff --git a/crates/resolver-tests/Cargo.toml b/crates/resolver-tests/Cargo.toml
new file mode 100644
index 0000000..cc50ad3
--- /dev/null
+++ b/crates/resolver-tests/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "resolver-tests"
+version = "0.1.0"
+edition = "2018"
+
+[dependencies]
+cargo = { path = "../.." }
+cargo-util = { path = "../cargo-util" }
+is-terminal = "0.4.0"
+lazy_static = "1.3.0"
+proptest = "0.9.1"
+varisat = "0.2.1"
diff --git a/crates/resolver-tests/src/lib.rs b/crates/resolver-tests/src/lib.rs
new file mode 100644
index 0000000..3ffb6c5
--- /dev/null
+++ b/crates/resolver-tests/src/lib.rs
@@ -0,0 +1,991 @@
+#![allow(clippy::all)]
+
+use std::cell::RefCell;
+use std::cmp::PartialEq;
+use std::cmp::{max, min};
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::fmt;
+use std::fmt::Write;
+use std::rc::Rc;
+use std::task::Poll;
+use std::time::Instant;
+
+use cargo::core::dependency::DepKind;
+use cargo::core::resolver::{self, ResolveOpts, VersionPreferences};
+use cargo::core::source::{GitReference, QueryKind, SourceId};
+use cargo::core::Resolve;
+use cargo::core::{Dependency, PackageId, Registry, Summary};
+use cargo::util::{CargoResult, Config, Graph, IntoUrl};
+
+use proptest::collection::{btree_map, vec};
+use proptest::prelude::*;
+use proptest::sample::Index;
+use proptest::string::string_regex;
+use varisat::{self, ExtendFormula};
+
+pub fn resolve(deps: Vec<Dependency>, registry: &[Summary]) -> CargoResult<Vec<PackageId>> {
+ resolve_with_config(deps, registry, &Config::default().unwrap())
+}
+
+pub fn resolve_and_validated(
+ deps: Vec<Dependency>,
+ registry: &[Summary],
+ sat_resolve: Option<SatResolve>,
+) -> CargoResult<Vec<PackageId>> {
+ let resolve = resolve_with_config_raw(deps.clone(), registry, &Config::default().unwrap());
+
+ match resolve {
+ Err(e) => {
+ let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry));
+ if sat_resolve.sat_resolve(&deps) {
+ panic!(
+ "the resolve err but the sat_resolve thinks this will work:\n{}",
+ sat_resolve.use_packages().unwrap()
+ );
+ }
+ Err(e)
+ }
+ Ok(resolve) => {
+ let mut stack = vec![pkg_id("root")];
+ let mut used = HashSet::new();
+ let mut links = HashSet::new();
+ while let Some(p) = stack.pop() {
+ assert!(resolve.contains(&p));
+ if used.insert(p) {
+ // in the tests all `links` crates end in `-sys`
+ if p.name().ends_with("-sys") {
+ assert!(links.insert(p.name()));
+ }
+ stack.extend(resolve.deps(p).map(|(dp, deps)| {
+ for d in deps {
+ assert!(d.matches_id(dp));
+ }
+ dp
+ }));
+ }
+ }
+ let out = resolve.sort();
+ assert_eq!(out.len(), used.len());
+
+ let mut pub_deps: HashMap<PackageId, HashSet<_>> = HashMap::new();
+ for &p in out.iter() {
+ // make the list of `p` public dependencies
+ let mut self_pub_dep = HashSet::new();
+ self_pub_dep.insert(p);
+ for (dp, deps) in resolve.deps(p) {
+ if deps.iter().any(|d| d.is_public()) {
+ self_pub_dep.extend(pub_deps[&dp].iter().cloned())
+ }
+ }
+ pub_deps.insert(p, self_pub_dep);
+
+ // check if `p` has a public dependencies conflicts
+ let seen_dep: BTreeSet<_> = resolve
+ .deps(p)
+ .flat_map(|(dp, _)| pub_deps[&dp].iter().cloned())
+ .collect();
+ let seen_dep: Vec<_> = seen_dep.iter().collect();
+ for a in seen_dep.windows(2) {
+ if a[0].name() == a[1].name() {
+ panic!(
+ "the package {:?} can publicly see {:?} and {:?}",
+ p, a[0], a[1]
+ )
+ }
+ }
+ }
+ let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry));
+ if !sat_resolve.sat_is_valid_solution(&out) {
+ panic!(
+ "the sat_resolve err but the resolve thinks this will work:\n{:?}",
+ resolve
+ );
+ }
+ Ok(out)
+ }
+ }
+}
+
+pub fn resolve_with_config(
+ deps: Vec<Dependency>,
+ registry: &[Summary],
+ config: &Config,
+) -> CargoResult<Vec<PackageId>> {
+ let resolve = resolve_with_config_raw(deps, registry, config)?;
+ Ok(resolve.sort())
+}
+
+pub fn resolve_with_config_raw(
+ deps: Vec<Dependency>,
+ registry: &[Summary],
+ config: &Config,
+) -> CargoResult<Resolve> {
+ struct MyRegistry<'a> {
+ list: &'a [Summary],
+ used: HashSet<PackageId>,
+ }
+ impl<'a> Registry for MyRegistry<'a> {
+ fn query(
+ &mut self,
+ dep: &Dependency,
+ kind: QueryKind,
+ f: &mut dyn FnMut(Summary),
+ ) -> Poll<CargoResult<()>> {
+ for summary in self.list.iter() {
+ let matched = match kind {
+ QueryKind::Exact => dep.matches(summary),
+ QueryKind::Fuzzy => true,
+ };
+ if matched {
+ self.used.insert(summary.package_id());
+ f(summary.clone());
+ }
+ }
+ Poll::Ready(Ok(()))
+ }
+
+ fn describe_source(&self, _src: SourceId) -> String {
+ String::new()
+ }
+
+ fn is_replaced(&self, _src: SourceId) -> bool {
+ false
+ }
+
+ fn block_until_ready(&mut self) -> CargoResult<()> {
+ Ok(())
+ }
+ }
+ impl<'a> Drop for MyRegistry<'a> {
+ fn drop(&mut self) {
+ if std::thread::panicking() && self.list.len() != self.used.len() {
+                // we found a case that causes a panic and did not use all of the input.
+                // let's print the part of the input that was used, for minimization.
+ println!(
+ "{:?}",
+ PrettyPrintRegistry(
+ self.list
+ .iter()
+ .filter(|s| { self.used.contains(&s.package_id()) })
+ .cloned()
+ .collect()
+ )
+ );
+ }
+ }
+ }
+ let mut registry = MyRegistry {
+ list: registry,
+ used: HashSet::new(),
+ };
+ let summary = Summary::new(
+ config,
+ pkg_id("root"),
+ deps,
+ &BTreeMap::new(),
+ None::<&String>,
+ )
+ .unwrap();
+ let opts = ResolveOpts::everything();
+ let start = Instant::now();
+ let resolve = resolver::resolve(
+ &[(summary, opts)],
+ &[],
+ &mut registry,
+ &VersionPreferences::default(),
+ Some(config),
+ true,
+ );
+
+    // The largest test in our suite takes less than 30 sec.
+    // So let's fail the test if we have been running for too long.
+ assert!(start.elapsed().as_secs() < 60);
+ resolve
+}
+
+const fn num_bits<T>() -> usize {
+ std::mem::size_of::<T>() * 8
+}
+
+fn log_bits(x: usize) -> usize {
+ if x == 0 {
+ return 0;
+ }
+ assert!(x > 0);
+ (num_bits::<usize>() as u32 - x.leading_zeros()) as usize
+}
+
+fn sat_at_most_one(solver: &mut impl varisat::ExtendFormula, vars: &[varisat::Var]) {
+ if vars.len() <= 1 {
+ return;
+ } else if vars.len() == 2 {
+ solver.add_clause(&[vars[0].negative(), vars[1].negative()]);
+ return;
+ } else if vars.len() == 3 {
+ solver.add_clause(&[vars[0].negative(), vars[1].negative()]);
+ solver.add_clause(&[vars[0].negative(), vars[2].negative()]);
+ solver.add_clause(&[vars[1].negative(), vars[2].negative()]);
+ return;
+ }
+ // use the "Binary Encoding" from
+ // https://www.it.uu.se/research/group/astra/ModRef10/papers/Alan%20M.%20Frisch%20and%20Paul%20A.%20Giannoros.%20SAT%20Encodings%20of%20the%20At-Most-k%20Constraint%20-%20ModRef%202010.pdf
+ let bits: Vec<varisat::Var> = solver.new_var_iter(log_bits(vars.len())).collect();
+ for (i, p) in vars.iter().enumerate() {
+ for b in 0..bits.len() {
+ solver.add_clause(&[p.negative(), bits[b].lit(((1 << b) & i) > 0)]);
+ }
+ }
+}
+
+fn sat_at_most_one_by_key<K: std::hash::Hash + Eq>(
+ cnf: &mut impl varisat::ExtendFormula,
+ data: impl Iterator<Item = (K, varisat::Var)>,
+) -> HashMap<K, Vec<varisat::Var>> {
+ // no two packages with the same links set
+ let mut by_keys: HashMap<K, Vec<varisat::Var>> = HashMap::new();
+ for (p, v) in data {
+ by_keys.entry(p).or_default().push(v)
+ }
+ for key in by_keys.values() {
+ sat_at_most_one(cnf, key);
+ }
+ by_keys
+}
+
+/// Resolution can be reduced to the SAT problem. So this is an alternative implementation
+/// of the resolver that uses a SAT library for the hard work. This is intended to be easy to read,
+/// as compared to the real resolver.
+///
+/// For the subset of functionality that is currently exercised by `registry_strategy`, this
+/// will find a valid resolution if one exists. The big thing that the real resolver does,
+/// that this one does not do, is work with features and optional dependencies.
+///
+/// The SAT library does not optimize for the newer version,
+/// so the selected packages may not match the real resolver.
+#[derive(Clone)]
+pub struct SatResolve(Rc<RefCell<SatResolveInner>>);
+struct SatResolveInner {
+ solver: varisat::Solver<'static>,
+ var_for_is_packages_used: HashMap<PackageId, varisat::Var>,
+ by_name: HashMap<&'static str, Vec<PackageId>>,
+}
+
+impl SatResolve {
+ pub fn new(registry: &[Summary]) -> Self {
+ let mut cnf = varisat::CnfFormula::new();
+ let var_for_is_packages_used: HashMap<PackageId, varisat::Var> = registry
+ .iter()
+ .map(|s| (s.package_id(), cnf.new_var()))
+ .collect();
+
+ // no two packages with the same links set
+ sat_at_most_one_by_key(
+ &mut cnf,
+ registry
+ .iter()
+ .map(|s| (s.links(), var_for_is_packages_used[&s.package_id()]))
+ .filter(|(l, _)| l.is_some()),
+ );
+
+ // no two semver compatible versions of the same package
+ let by_activations_keys = sat_at_most_one_by_key(
+ &mut cnf,
+ var_for_is_packages_used
+ .iter()
+ .map(|(p, &v)| (p.as_activations_key(), v)),
+ );
+
+ let mut by_name: HashMap<&'static str, Vec<PackageId>> = HashMap::new();
+
+ for p in registry.iter() {
+ by_name
+ .entry(p.name().as_str())
+ .or_default()
+ .push(p.package_id())
+ }
+
+ let empty_vec = vec![];
+
+ let mut graph: Graph<PackageId, ()> = Graph::new();
+
+ let mut version_selected_for: HashMap<
+ PackageId,
+ HashMap<Dependency, HashMap<_, varisat::Var>>,
+ > = HashMap::new();
+        // active packages need each of their `deps` to be satisfied
+ for p in registry.iter() {
+ graph.add(p.package_id());
+ for dep in p.dependencies() {
+ // This can more easily be written as:
+ // !is_active(p) or one of the things that match dep is_active
+ // All the complexity, from here to the end, is to support public and private dependencies!
+ let mut by_key: HashMap<_, Vec<varisat::Lit>> = HashMap::new();
+ for &m in by_name
+ .get(dep.package_name().as_str())
+ .unwrap_or(&empty_vec)
+ .iter()
+ .filter(|&p| dep.matches_id(*p))
+ {
+ graph.link(p.package_id(), m);
+ by_key
+ .entry(m.as_activations_key())
+ .or_default()
+ .push(var_for_is_packages_used[&m].positive());
+ }
+ let keys: HashMap<_, _> = by_key.keys().map(|&k| (k, cnf.new_var())).collect();
+
+ // if `p` is active then we need to select one of the keys
+ let matches: Vec<_> = keys
+ .values()
+ .map(|v| v.positive())
+ .chain(Some(var_for_is_packages_used[&p.package_id()].negative()))
+ .collect();
+ cnf.add_clause(&matches);
+
+ // if a key is active then we need to select one of the versions
+ for (key, vars) in by_key.iter() {
+ let mut matches = vars.clone();
+ matches.push(keys[key].negative());
+ cnf.add_clause(&matches);
+ }
+
+ version_selected_for
+ .entry(p.package_id())
+ .or_default()
+ .insert(dep.clone(), keys);
+ }
+ }
+
+ let topological_order = graph.sort();
+
+ // we already ensure there is only one version for each `activations_key` so we can think of
+ // `publicly_exports` as being in terms of a set of `activations_key`s
+ let mut publicly_exports: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new();
+
+ for &key in by_activations_keys.keys() {
+ // everything publicly depends on itself
+ let var = publicly_exports
+ .entry(key)
+ .or_default()
+ .entry(key)
+ .or_insert_with(|| cnf.new_var());
+ cnf.add_clause(&[var.positive()]);
+ }
+
+ // if a `dep` is public then `p` `publicly_exports` all the things that the selected version `publicly_exports`
+ for &p in topological_order.iter() {
+ if let Some(deps) = version_selected_for.get(&p) {
+ let mut p_exports = publicly_exports.remove(&p.as_activations_key()).unwrap();
+ for (_, versions) in deps.iter().filter(|(d, _)| d.is_public()) {
+ for (ver, sel) in versions {
+ for (&export_pid, &export_var) in publicly_exports[ver].iter() {
+ let our_var =
+ p_exports.entry(export_pid).or_insert_with(|| cnf.new_var());
+ cnf.add_clause(&[
+ sel.negative(),
+ export_var.negative(),
+ our_var.positive(),
+ ]);
+ }
+ }
+ }
+ publicly_exports.insert(p.as_activations_key(), p_exports);
+ }
+ }
+
+ // we already ensure there is only one version for each `activations_key` so we can think of
+ // `can_see` as being in terms of a set of `activations_key`s
+ // and if `p` `publicly_exports` `export` then it `can_see` `export`
+ let mut can_see: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new();
+
+ // if `p` has a `dep` that selected `ver` then it `can_see` all the things that the selected version `publicly_exports`
+ for (&p, deps) in version_selected_for.iter() {
+ let p_can_see = can_see.entry(p).or_default();
+ for (_, versions) in deps.iter() {
+ for (&ver, sel) in versions {
+ for (&export_pid, &export_var) in publicly_exports[&ver].iter() {
+ let our_var = p_can_see.entry(export_pid).or_insert_with(|| cnf.new_var());
+ cnf.add_clause(&[
+ sel.negative(),
+ export_var.negative(),
+ our_var.positive(),
+ ]);
+ }
+ }
+ }
+ }
+
+ // a package `can_see` only one version by each name
+ for (_, see) in can_see.iter() {
+ sat_at_most_one_by_key(&mut cnf, see.iter().map(|((name, _, _), &v)| (name, v)));
+ }
+ let mut solver = varisat::Solver::new();
+ solver.add_formula(&cnf);
+
+        // We don't need to `solve` now. We know that "use nothing" will satisfy all the clauses so far.
+ // But things run faster if we let it spend some time figuring out how the constraints interact before we add assumptions.
+ solver
+ .solve()
+ .expect("docs say it can't error in default config");
+ SatResolve(Rc::new(RefCell::new(SatResolveInner {
+ solver,
+ var_for_is_packages_used,
+ by_name,
+ })))
+ }
+ pub fn sat_resolve(&self, deps: &[Dependency]) -> bool {
+ let mut s = self.0.borrow_mut();
+ let mut assumption = vec![];
+ let mut this_call = None;
+
+ // the starting `deps` need to be satisfied
+ for dep in deps.iter() {
+ let empty_vec = vec![];
+ let matches: Vec<varisat::Lit> = s
+ .by_name
+ .get(dep.package_name().as_str())
+ .unwrap_or(&empty_vec)
+ .iter()
+ .filter(|&p| dep.matches_id(*p))
+ .map(|p| s.var_for_is_packages_used[p].positive())
+ .collect();
+ if matches.is_empty() {
+ return false;
+ } else if matches.len() == 1 {
+ assumption.extend_from_slice(&matches)
+ } else {
+ if this_call.is_none() {
+ let new_var = s.solver.new_var();
+ this_call = Some(new_var);
+ assumption.push(new_var.positive());
+ }
+ let mut matches = matches;
+ matches.push(this_call.unwrap().negative());
+ s.solver.add_clause(&matches);
+ }
+ }
+
+ s.solver.assume(&assumption);
+
+ s.solver
+ .solve()
+ .expect("docs say it can't error in default config")
+ }
+ pub fn sat_is_valid_solution(&self, pids: &[PackageId]) -> bool {
+ let mut s = self.0.borrow_mut();
+ for p in pids {
+ if p.name().as_str() != "root" && !s.var_for_is_packages_used.contains_key(p) {
+ return false;
+ }
+ }
+ let assumption: Vec<_> = s
+ .var_for_is_packages_used
+ .iter()
+ .map(|(p, v)| v.lit(pids.contains(p)))
+ .collect();
+
+ s.solver.assume(&assumption);
+
+ s.solver
+ .solve()
+ .expect("docs say it can't error in default config")
+ }
+ fn use_packages(&self) -> Option<String> {
+ self.0.borrow().solver.model().map(|lits| {
+ let lits: HashSet<_> = lits
+ .iter()
+ .filter(|l| l.is_positive())
+ .map(|l| l.var())
+ .collect();
+ let mut out = String::new();
+ out.push_str("used:\n");
+ for (p, v) in self.0.borrow().var_for_is_packages_used.iter() {
+ if lits.contains(v) {
+ writeln!(&mut out, " {}", p).unwrap();
+ }
+ }
+ out
+ })
+ }
+}
+
+pub trait ToDep {
+ fn to_dep(self) -> Dependency;
+}
+
+impl ToDep for &'static str {
+ fn to_dep(self) -> Dependency {
+ Dependency::parse(self, Some("1.0.0"), registry_loc()).unwrap()
+ }
+}
+
+impl ToDep for Dependency {
+ fn to_dep(self) -> Dependency {
+ self
+ }
+}
+
+pub trait ToPkgId {
+ fn to_pkgid(&self) -> PackageId;
+}
+
+impl ToPkgId for PackageId {
+ fn to_pkgid(&self) -> PackageId {
+ *self
+ }
+}
+
+impl<'a> ToPkgId for &'a str {
+ fn to_pkgid(&self) -> PackageId {
+ PackageId::new(*self, "1.0.0", registry_loc()).unwrap()
+ }
+}
+
+impl<T: AsRef<str>, U: AsRef<str>> ToPkgId for (T, U) {
+ fn to_pkgid(&self) -> PackageId {
+ let (name, vers) = self;
+ PackageId::new(name.as_ref(), vers.as_ref(), registry_loc()).unwrap()
+ }
+}
+
+#[macro_export]
+macro_rules! pkg {
+ ($pkgid:expr => [$($deps:expr),+ $(,)* ]) => ({
+ let d: Vec<Dependency> = vec![$($deps.to_dep()),+];
+ $crate::pkg_dep($pkgid, d)
+ });
+
+ ($pkgid:expr) => ({
+ $crate::pkg($pkgid)
+ })
+}
+
+fn registry_loc() -> SourceId {
+ lazy_static::lazy_static! {
+ static ref EXAMPLE_DOT_COM: SourceId =
+ SourceId::for_registry(&"https://example.com".into_url().unwrap()).unwrap();
+ }
+ *EXAMPLE_DOT_COM
+}
+
+pub fn pkg<T: ToPkgId>(name: T) -> Summary {
+ pkg_dep(name, Vec::new())
+}
+
+pub fn pkg_dep<T: ToPkgId>(name: T, dep: Vec<Dependency>) -> Summary {
+ let pkgid = name.to_pkgid();
+ let link = if pkgid.name().ends_with("-sys") {
+ Some(pkgid.name().as_str())
+ } else {
+ None
+ };
+ Summary::new(
+ &Config::default().unwrap(),
+ name.to_pkgid(),
+ dep,
+ &BTreeMap::new(),
+ link,
+ )
+ .unwrap()
+}
+
+pub fn pkg_id(name: &str) -> PackageId {
+ PackageId::new(name, "1.0.0", registry_loc()).unwrap()
+}
+
+fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
+ let remote = loc.into_url();
+ let master = GitReference::Branch("master".to_string());
+ let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap();
+
+ PackageId::new(name, "1.0.0", source_id).unwrap()
+}
+
+pub fn pkg_loc(name: &str, loc: &str) -> Summary {
+ let link = if name.ends_with("-sys") {
+ Some(name)
+ } else {
+ None
+ };
+ Summary::new(
+ &Config::default().unwrap(),
+ pkg_id_loc(name, loc),
+ Vec::new(),
+ &BTreeMap::new(),
+ link,
+ )
+ .unwrap()
+}
+
+pub fn remove_dep(sum: &Summary, ind: usize) -> Summary {
+ let mut deps = sum.dependencies().to_vec();
+ deps.remove(ind);
+ // note: more things will need to be copied over in the future, but it works for now.
+ Summary::new(
+ &Config::default().unwrap(),
+ sum.package_id(),
+ deps,
+ &BTreeMap::new(),
+ sum.links().map(|a| a.as_str()),
+ )
+ .unwrap()
+}
+
+pub fn dep(name: &str) -> Dependency {
+ dep_req(name, "*")
+}
+pub fn dep_req(name: &str, req: &str) -> Dependency {
+ Dependency::parse(name, Some(req), registry_loc()).unwrap()
+}
+pub fn dep_req_kind(name: &str, req: &str, kind: DepKind, public: bool) -> Dependency {
+ let mut dep = dep_req(name, req);
+ dep.set_kind(kind);
+ dep.set_public(public);
+ dep
+}
+
+pub fn dep_loc(name: &str, location: &str) -> Dependency {
+ let url = location.into_url().unwrap();
+ let master = GitReference::Branch("master".to_string());
+ let source_id = SourceId::for_git(&url, master).unwrap();
+ Dependency::parse(name, Some("1.0.0"), source_id).unwrap()
+}
+pub fn dep_kind(name: &str, kind: DepKind) -> Dependency {
+ dep(name).set_kind(kind).clone()
+}
+
+pub fn registry(pkgs: Vec<Summary>) -> Vec<Summary> {
+ pkgs
+}
+
+pub fn names<P: ToPkgId>(names: &[P]) -> Vec<PackageId> {
+ names.iter().map(|name| name.to_pkgid()).collect()
+}
+
+pub fn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {
+ names
+ .iter()
+ .map(|&(name, loc)| pkg_id_loc(name, loc))
+ .collect()
+}
+
+/// By default `Summary` and `Dependency` have a very verbose `Debug` representation.
+/// This replaces with a representation that uses constructors from this file.
+///
+/// If `registry_strategy` is improved to modify more fields
+/// then this needs to update to display the corresponding constructor.
+pub struct PrettyPrintRegistry(pub Vec<Summary>);
+
+impl fmt::Debug for PrettyPrintRegistry {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "vec![")?;
+ for s in &self.0 {
+ if s.dependencies().is_empty() {
+ write!(f, "pkg!((\"{}\", \"{}\")),", s.name(), s.version())?;
+ } else {
+ write!(f, "pkg!((\"{}\", \"{}\") => [", s.name(), s.version())?;
+ for d in s.dependencies() {
+ if d.kind() == DepKind::Normal
+ && &d.version_req().to_string() == "*"
+ && !d.is_public()
+ {
+ write!(f, "dep(\"{}\"),", d.name_in_toml())?;
+ } else if d.kind() == DepKind::Normal && !d.is_public() {
+ write!(
+ f,
+ "dep_req(\"{}\", \"{}\"),",
+ d.name_in_toml(),
+ d.version_req()
+ )?;
+ } else {
+ write!(
+ f,
+ "dep_req_kind(\"{}\", \"{}\", {}, {}),",
+ d.name_in_toml(),
+ d.version_req(),
+ match d.kind() {
+ DepKind::Development => "DepKind::Development",
+ DepKind::Build => "DepKind::Build",
+ DepKind::Normal => "DepKind::Normal",
+ },
+ d.is_public()
+ )?;
+ }
+ }
+ write!(f, "]),")?;
+ }
+ }
+ write!(f, "]")
+ }
+}
+
+#[test]
+fn meta_test_deep_pretty_print_registry() {
+ assert_eq!(
+ &format!(
+ "{:?}",
+ PrettyPrintRegistry(vec![
+ pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
+ pkg!(("foo", "2.0.0") => [dep_req("bar", "*")]),
+ pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
+ dep_req("other", "1")]),
+ pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
+ pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
+ pkg!(("baz", "1.0.1")),
+ pkg!(("cat", "1.0.2") => [dep_req_kind("other", "2", DepKind::Build, false)]),
+ pkg!(("cat", "1.0.3") => [dep_req_kind("other", "2", DepKind::Development, false)]),
+ pkg!(("dep_req", "1.0.0")),
+ pkg!(("dep_req", "2.0.0")),
+ ])
+ ),
+ "vec![pkg!((\"foo\", \"1.0.1\") => [dep_req(\"bar\", \"^1\"),]),\
+ pkg!((\"foo\", \"1.0.0\") => [dep_req(\"bar\", \"^2\"),]),\
+ pkg!((\"foo\", \"2.0.0\") => [dep(\"bar\"),]),\
+ pkg!((\"bar\", \"1.0.0\") => [dep_req(\"baz\", \"=1.0.2\"),dep_req(\"other\", \"^1\"),]),\
+ pkg!((\"bar\", \"2.0.0\") => [dep_req(\"baz\", \"=1.0.1\"),]),\
+ pkg!((\"baz\", \"1.0.2\") => [dep_req(\"other\", \"^2\"),]),\
+ pkg!((\"baz\", \"1.0.1\")),\
+ pkg!((\"cat\", \"1.0.2\") => [dep_req_kind(\"other\", \"^2\", DepKind::Build, false),]),\
+ pkg!((\"cat\", \"1.0.3\") => [dep_req_kind(\"other\", \"^2\", DepKind::Development, false),]),\
+ pkg!((\"dep_req\", \"1.0.0\")),\
+ pkg!((\"dep_req\", \"2.0.0\")),]"
+ )
+}
+
+/// This generates a random registry index.
+/// Unlike vec((Name, Ver, vec((Name, VerRq), ..), ..)
+/// This strategy has a high probability of having valid dependencies
+pub fn registry_strategy(
+ max_crates: usize,
+ max_versions: usize,
+ shrinkage: usize,
+) -> impl Strategy<Value = PrettyPrintRegistry> {
+ let name = string_regex("[A-Za-z][A-Za-z0-9_-]*(-sys)?").unwrap();
+
+ let raw_version = ..max_versions.pow(3);
+ let version_from_raw = move |r: usize| {
+ let major = ((r / max_versions) / max_versions) % max_versions;
+ let minor = (r / max_versions) % max_versions;
+ let patch = r % max_versions;
+ format!("{}.{}.{}", major, minor, patch)
+ };
+
+ // If this is false then the crate will depend on the nonexistent "bad"
+ // instead of the complex set we generated for it.
+ let allow_deps = prop::bool::weighted(0.99);
+
+ let list_of_versions =
+ btree_map(raw_version, allow_deps, 1..=max_versions).prop_map(move |ver| {
+ ver.into_iter()
+ .map(|a| (version_from_raw(a.0), a.1))
+ .collect::<Vec<_>>()
+ });
+
+ let list_of_crates_with_versions =
+ btree_map(name, list_of_versions, 1..=max_crates).prop_map(|mut vers| {
+ // root is the name of the thing being compiled
+ // so it would be confusing to have it in the index
+ vers.remove("root");
+ // bad is a name reserved for a dep that won't work
+ vers.remove("bad");
+ vers
+ });
+
+    // each version of each crate can depend on each crate smaller than it.
+ // In theory shrinkage should be 2, but in practice we get better trees with a larger value.
+ let max_deps = max_versions * (max_crates * (max_crates - 1)) / shrinkage;
+
+ let raw_version_range = (any::<Index>(), any::<Index>());
+ let raw_dependency = (
+ any::<Index>(),
+ any::<Index>(),
+ raw_version_range,
+ 0..=1,
+ Just(false),
+ // TODO: ^ this needs to be set back to `any::<bool>()` and work before public & private dependencies can stabilize
+ );
+
+ fn order_index(a: Index, b: Index, size: usize) -> (usize, usize) {
+ let (a, b) = (a.index(size), b.index(size));
+ (min(a, b), max(a, b))
+ }
+
+ let list_of_raw_dependency = vec(raw_dependency, ..=max_deps);
+
+ // By default a package depends only on other packages that have a smaller name,
+ // this helps make sure that all things in the resulting index are DAGs.
+    // If this is true then the DAG is maintained with greater instead.
+ let reverse_alphabetical = any::<bool>().no_shrink();
+
+ (
+ list_of_crates_with_versions,
+ list_of_raw_dependency,
+ reverse_alphabetical,
+ )
+ .prop_map(
+ |(crate_vers_by_name, raw_dependencies, reverse_alphabetical)| {
+ let list_of_pkgid: Vec<_> = crate_vers_by_name
+ .iter()
+ .flat_map(|(name, vers)| vers.iter().map(move |x| ((name.as_str(), &x.0), x.1)))
+ .collect();
+ let len_all_pkgid = list_of_pkgid.len();
+ let mut dependency_by_pkgid = vec![vec![]; len_all_pkgid];
+ for (a, b, (c, d), k, p) in raw_dependencies {
+ let (a, b) = order_index(a, b, len_all_pkgid);
+ let (a, b) = if reverse_alphabetical { (b, a) } else { (a, b) };
+ let ((dep_name, _), _) = list_of_pkgid[a];
+ if (list_of_pkgid[b].0).0 == dep_name {
+ continue;
+ }
+ let s = &crate_vers_by_name[dep_name];
+ let s_last_index = s.len() - 1;
+ let (c, d) = order_index(c, d, s.len());
+
+ dependency_by_pkgid[b].push(dep_req_kind(
+ dep_name,
+ &if c == 0 && d == s_last_index {
+ "*".to_string()
+ } else if c == 0 {
+ format!("<={}", s[d].0)
+ } else if d == s_last_index {
+ format!(">={}", s[c].0)
+ } else if c == d {
+ format!("={}", s[c].0)
+ } else {
+ format!(">={}, <={}", s[c].0, s[d].0)
+ },
+ match k {
+ 0 => DepKind::Normal,
+ 1 => DepKind::Build,
+ // => DepKind::Development, // Development has no impact so don't gen
+ _ => panic!("bad index for DepKind"),
+ },
+ p && k == 0,
+ ))
+ }
+
+ let mut out: Vec<Summary> = list_of_pkgid
+ .into_iter()
+ .zip(dependency_by_pkgid.into_iter())
+ .map(|(((name, ver), allow_deps), deps)| {
+ pkg_dep(
+ (name, ver).to_pkgid(),
+ if !allow_deps {
+ vec![dep_req("bad", "*")]
+ } else {
+ let mut deps = deps;
+ deps.sort_by_key(|d| d.name_in_toml());
+ deps.dedup_by_key(|d| d.name_in_toml());
+ deps
+ },
+ )
+ })
+ .collect();
+
+ if reverse_alphabetical {
+ // make sure the complicated cases are at the end
+ out.reverse();
+ }
+
+ PrettyPrintRegistry(out)
+ },
+ )
+}
+
+/// This test is to test the generator to ensure
+/// that it makes registries with large dependency trees
+#[test]
+fn meta_test_deep_trees_from_strategy() {
+ use proptest::strategy::ValueTree;
+ use proptest::test_runner::TestRunner;
+
+ let mut dis = [0; 21];
+
+ let strategy = registry_strategy(50, 20, 60);
+ let mut test_runner = TestRunner::deterministic();
+ for _ in 0..128 {
+ let PrettyPrintRegistry(input) = strategy
+ .new_tree(&mut TestRunner::new_with_rng(
+ Default::default(),
+ test_runner.new_rng(),
+ ))
+ .unwrap()
+ .current();
+ let reg = registry(input.clone());
+ for this in input.iter().rev().take(10) {
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ );
+ dis[res
+ .as_ref()
+ .map(|x| min(x.len(), dis.len()) - 1)
+ .unwrap_or(0)] += 1;
+ if dis.iter().all(|&x| x > 0) {
+ return;
+ }
+ }
+ }
+
+ panic!(
+ "In 1280 tries we did not see a wide enough distribution of dependency trees! dis: {:?}",
+ dis
+ );
+}
+
+/// This test is to test the generator to ensure
+/// that it makes registries that include multiple versions of the same library
+#[test]
+fn meta_test_multiple_versions_strategy() {
+ use proptest::strategy::ValueTree;
+ use proptest::test_runner::TestRunner;
+
+ let mut dis = [0; 10];
+
+ let strategy = registry_strategy(50, 20, 60);
+ let mut test_runner = TestRunner::deterministic();
+ for _ in 0..128 {
+ let PrettyPrintRegistry(input) = strategy
+ .new_tree(&mut TestRunner::new_with_rng(
+ Default::default(),
+ test_runner.new_rng(),
+ ))
+ .unwrap()
+ .current();
+ let reg = registry(input.clone());
+ for this in input.iter().rev().take(10) {
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ );
+ if let Ok(mut res) = res {
+ let res_len = res.len();
+ res.sort_by_key(|s| s.name());
+ res.dedup_by_key(|s| s.name());
+ dis[min(res_len - res.len(), dis.len() - 1)] += 1;
+ }
+ if dis.iter().all(|&x| x > 0) {
+ return;
+ }
+ }
+ }
+ panic!(
+ "In 1280 tries we did not see a wide enough distribution of multiple versions of the same library! dis: {:?}",
+ dis
+ );
+}
+
+/// Assert `xs` contains `elems`
+#[track_caller]
+pub fn assert_contains<A: PartialEq>(xs: &[A], elems: &[A]) {
+ for elem in elems {
+ assert!(xs.contains(elem));
+ }
+}
+
+#[track_caller]
+pub fn assert_same<A: PartialEq>(a: &[A], b: &[A]) {
+ assert_eq!(a.len(), b.len());
+ assert_contains(b, a);
+}
diff --git a/crates/resolver-tests/tests/resolve.rs b/crates/resolver-tests/tests/resolve.rs
new file mode 100644
index 0000000..9d8f25a
--- /dev/null
+++ b/crates/resolver-tests/tests/resolve.rs
@@ -0,0 +1,1504 @@
+use cargo::core::dependency::DepKind;
+use cargo::core::Dependency;
+use cargo::util::Config;
+use cargo_util::is_ci;
+
+use resolver_tests::{
+ assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, dep_req_kind, loc_names, names,
+ pkg, pkg_id, pkg_loc, registry, registry_strategy, remove_dep, resolve, resolve_and_validated,
+ resolve_with_config, PrettyPrintRegistry, SatResolve, ToDep, ToPkgId,
+};
+
+use proptest::prelude::*;
+
+// NOTE: proptest is a form of fuzz testing. It generates random input and makes sure that
+// certain universal truths are upheld. Therefore, it can pass when there is a problem,
+// but if it fails then there really is something wrong. When testing something as
+// complicated as the resolver, the problems can be very subtle and hard to generate.
+// We have had a history of these tests only failing on PRs long after a bug is introduced.
+// If you have one of these test fail please report it on #6258,
+// and if you did not change the resolver then feel free to retry without concern.
+proptest! {
+ #![proptest_config(ProptestConfig {
+ max_shrink_iters:
+ if is_ci() || !is_terminal::IsTerminal::is_terminal(&std::io::stderr()){
+ // This attempts to make sure that CI will fail fast,
+ 0
+ } else {
+ // but that local builds will give a small clear test case.
+ u32::MAX
+ },
+ result_cache: prop::test_runner::basic_result_cache,
+ .. ProptestConfig::default()
+ })]
+
+ /// NOTE: if you think this test has failed spuriously see the note at the top of this macro.
+ #[test]
+ fn prop_passes_validation(
+ PrettyPrintRegistry(input) in registry_strategy(50, 20, 60)
+ ) {
+ let reg = registry(input.clone());
+ let sat_resolve = SatResolve::new(&reg);
+ // there is only a small chance that any one
+ // crate will be interesting.
+ // So we try some of the most complicated.
+ for this in input.iter().rev().take(20) {
+ let _ = resolve_and_validated(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ Some(sat_resolve.clone()),
+ );
+ }
+ }
+
+ /// NOTE: if you think this test has failed spuriously see the note at the top of this macro.
+ #[test]
+ fn prop_minimum_version_errors_the_same(
+ PrettyPrintRegistry(input) in registry_strategy(50, 20, 60)
+ ) {
+ let mut config = Config::default().unwrap();
+ config.nightly_features_allowed = true;
+ config
+ .configure(
+ 1,
+ false,
+ None,
+ false,
+ false,
+ false,
+ &None,
+ &["minimal-versions".to_string()],
+ &[],
+ )
+ .unwrap();
+
+ let reg = registry(input.clone());
+ // there is only a small chance that any one
+ // crate will be interesting.
+ // So we try some of the most complicated.
+ for this in input.iter().rev().take(10) {
+ // minimal-versions change what order the candidates
+ // are tried but not the existence of a solution
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ );
+
+ let mres = resolve_with_config(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ &config,
+ );
+
+ prop_assert_eq!(
+ res.is_ok(),
+ mres.is_ok(),
+ "minimal-versions and regular resolver disagree about whether `{} = \"={}\"` can resolve",
+ this.name(),
+ this.version()
+ )
+ }
+ }
+
+ /// NOTE: if you think this test has failed spuriously see the note at the top of this macro.
+ #[test]
+ fn prop_removing_a_dep_cant_break(
+ PrettyPrintRegistry(input) in registry_strategy(50, 20, 60),
+ indexes_to_remove in prop::collection::vec((any::<prop::sample::Index>(), any::<prop::sample::Index>()), ..10)
+ ) {
+ let reg = registry(input.clone());
+ let mut removed_input = input.clone();
+ for (summary_idx, dep_idx) in indexes_to_remove {
+ if !removed_input.is_empty() {
+ let summary_idx = summary_idx.index(removed_input.len());
+ let deps = removed_input[summary_idx].dependencies();
+ if !deps.is_empty() {
+ let new = remove_dep(&removed_input[summary_idx], dep_idx.index(deps.len()));
+ removed_input[summary_idx] = new;
+ }
+ }
+ }
+ let removed_reg = registry(removed_input);
+ // there is only a small chance that any one
+ // crate will be interesting.
+ // So we try some of the most complicated.
+ for this in input.iter().rev().take(10) {
+ if resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ ).is_ok() {
+ prop_assert!(
+ resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &removed_reg,
+ ).is_ok(),
+ "full index worked for `{} = \"={}\"` but removing some deps broke it!",
+ this.name(),
+ this.version(),
+ )
+ }
+ }
+ }
+
+ /// NOTE: if you think this test has failed spuriously see the note at the top of this macro.
+ #[test]
+ fn prop_limited_independence_of_irrelevant_alternatives(
+ PrettyPrintRegistry(input) in registry_strategy(50, 20, 60),
+ indexes_to_unpublish in prop::collection::vec(any::<prop::sample::Index>(), ..10)
+ ) {
+ let reg = registry(input.clone());
+ // there is only a small chance that any one
+ // crate will be interesting.
+ // So we try some of the most complicated.
+ for this in input.iter().rev().take(10) {
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &reg,
+ );
+
+ match res {
+ Ok(r) => {
+ // If resolution was successful, then unpublishing a version of a crate
+ // that was not selected should not change that.
+ let not_selected: Vec<_> = input
+ .iter()
+ .cloned()
+ .filter(|x| !r.contains(&x.package_id()))
+ .collect();
+ if !not_selected.is_empty() {
+ let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(&not_selected)).collect();
+
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| !indexes_to_unpublish.contains(&x))
+ .collect(),
+ );
+
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &new_reg,
+ );
+
+ // Note: that we can not assert that the two `res` are identical
+ // as the resolver does depend on irrelevant alternatives.
+ // It uses how constrained a dependency requirement is
+ // to determine what order to evaluate requirements.
+
+ prop_assert!(
+ res.is_ok(),
+ "unpublishing {:?} stopped `{} = \"={}\"` from working",
+ indexes_to_unpublish.iter().map(|x| x.package_id()).collect::<Vec<_>>(),
+ this.name(),
+ this.version()
+ )
+ }
+ }
+
+ Err(_) => {
+ // If resolution was unsuccessful, then it should stay unsuccessful
+ // even if any version of a crate is unpublished.
+ let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(&input)).collect();
+
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| !indexes_to_unpublish.contains(&x))
+ .collect(),
+ );
+
+ let res = resolve(
+ vec![dep_req(&this.name(), &format!("={}", this.version()))],
+ &new_reg,
+ );
+
+ prop_assert!(
+ res.is_err(),
+ "full index did not work for `{} = \"={}\"` but unpublishing {:?} fixed it!",
+ this.name(),
+ this.version(),
+ indexes_to_unpublish.iter().map(|x| x.package_id()).collect::<Vec<_>>(),
+ )
+ }
+ }
+ }
+ }
+}
+
+#[test]
+#[should_panic(expected = "pub dep")] // The error handling is not yet implemented.
+fn pub_fail() {
+ let input = vec![
+ pkg!(("a", "0.0.4")),
+ pkg!(("a", "0.0.5")),
+ pkg!(("e", "0.0.6") => [dep_req_kind("a", "<= 0.0.4", DepKind::Normal, true),]),
+ pkg!(("kB", "0.0.3") => [dep_req("a", ">= 0.0.5"),dep("e"),]),
+ ];
+ let reg = registry(input);
+ assert!(resolve_and_validated(vec![dep("kB")], &reg, None).is_err());
+}
+
+#[test]
+fn basic_public_dependency() {
+ let reg = registry(vec![
+ pkg!(("A", "0.1.0")),
+ pkg!(("A", "0.2.0")),
+ pkg!("B" => [dep_req_kind("A", "0.1", DepKind::Normal, true)]),
+ pkg!("C" => [dep("A"), dep("B")]),
+ ]);
+
+ let res = resolve_and_validated(vec![dep("C")], &reg, None).unwrap();
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("C", "1.0.0"),
+ ("B", "1.0.0"),
+ ("A", "0.1.0"),
+ ]),
+ );
+}
+
+#[test]
+fn public_dependency_filling_in() {
+ // The resolver has an optimization where if a candidate to resolve a dependency
+    // has already been activated then we skip looking at the candidate's dependencies.
+ // However, we have to be careful as the new path may make pub dependencies invalid.
+
+ // Triggering this case requires dependencies to be resolved in a specific order.
+ // Fuzzing found this unintuitive case, that triggers this unfortunate order of operations:
+ // 1. `d`'s dep on `c` is resolved
+ // 2. `d`'s dep on `a` is resolved with `0.1.1`
+ // 3. `c`'s dep on `b` is resolved with `0.0.2`
+ // 4. `b`'s dep on `a` is resolved with `0.0.6` no pub dev conflict as `b` is private to `c`
+ // 5. `d`'s dep on `b` is resolved with `0.0.2` triggering the optimization.
+ // Do we notice that `d` has a pub dep conflict on `a`? Lets try it and see.
+ let reg = registry(vec![
+ pkg!(("a", "0.0.6")),
+ pkg!(("a", "0.1.1")),
+ pkg!(("b", "0.0.0") => [dep("bad")]),
+ pkg!(("b", "0.0.1") => [dep("bad")]),
+ pkg!(("b", "0.0.2") => [dep_req_kind("a", "=0.0.6", DepKind::Normal, true)]),
+ pkg!("c" => [dep_req("b", ">=0.0.1")]),
+ pkg!("d" => [dep("c"), dep("a"), dep("b")]),
+ ]);
+
+ let res = resolve_and_validated(vec![dep("d")], &reg, None).unwrap();
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("d", "1.0.0"),
+ ("c", "1.0.0"),
+ ("b", "0.0.2"),
+ ("a", "0.0.6"),
+ ]),
+ );
+}
+
+#[test]
+fn public_dependency_filling_in_and_update() {
+ // The resolver has an optimization where if a candidate to resolve a dependency
+    // has already been activated then we skip looking at the candidate's dependencies.
+ // However, we have to be careful as the new path may make pub dependencies invalid.
+
+ // Triggering this case requires dependencies to be resolved in a specific order.
+ // Fuzzing found this unintuitive case, that triggers this unfortunate order of operations:
+ // 1. `D`'s dep on `B` is resolved
+ // 2. `D`'s dep on `C` is resolved
+ // 3. `B`'s dep on `A` is resolved with `0.0.0`
+ // 4. `C`'s dep on `B` triggering the optimization.
+ // So did we add `A 0.0.0` to the deps `C` can see?
+ // Or are we going to resolve `C`'s dep on `A` with `0.0.2`?
+ // Lets try it and see.
+ let reg = registry(vec![
+ pkg!(("A", "0.0.0")),
+ pkg!(("A", "0.0.2")),
+ pkg!("B" => [dep_req_kind("A", "=0.0.0", DepKind::Normal, true),]),
+ pkg!("C" => [dep("A"),dep("B")]),
+ pkg!("D" => [dep("B"),dep("C")]),
+ ]);
+ let res = resolve_and_validated(vec![dep("D")], &reg, None).unwrap();
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("D", "1.0.0"),
+ ("C", "1.0.0"),
+ ("B", "1.0.0"),
+ ("A", "0.0.0"),
+ ]),
+ );
+}
+
+#[test]
+fn public_dependency_skipping() {
+ // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken: the effects of pub deps must be accounted for.
+ let input = vec![
+ pkg!(("a", "0.2.0")),
+ pkg!(("a", "2.0.0")),
+ pkg!(("b", "0.0.0") => [dep("bad")]),
+ pkg!(("b", "0.2.1") => [dep_req_kind("a", "0.2.0", DepKind::Normal, true)]),
+ pkg!("c" => [dep("a"),dep("b")]),
+ ];
+ let reg = registry(input);
+
+ resolve_and_validated(vec![dep("c")], &reg, None).unwrap();
+}
+
+#[test]
+fn public_dependency_skipping_in_backtracking() {
+ // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken: the effects of pub deps must be accounted for.
+ let input = vec![
+ pkg!(("A", "0.0.0") => [dep("bad")]),
+ pkg!(("A", "0.0.1") => [dep("bad")]),
+ pkg!(("A", "0.0.2") => [dep("bad")]),
+ pkg!(("A", "0.0.3") => [dep("bad")]),
+ pkg!(("A", "0.0.4")),
+ pkg!(("A", "0.0.5")),
+ pkg!("B" => [dep_req_kind("A", ">= 0.0.3", DepKind::Normal, true)]),
+ pkg!("C" => [dep_req("A", "<= 0.0.4"), dep("B")]),
+ ];
+ let reg = registry(input);
+
+ resolve_and_validated(vec![dep("C")], &reg, None).unwrap();
+}
+
+#[test]
+fn public_sat_topological_order() {
+ let input = vec![
+ pkg!(("a", "0.0.1")),
+ pkg!(("a", "0.0.0")),
+ pkg!(("b", "0.0.1") => [dep_req_kind("a", "= 0.0.1", DepKind::Normal, true),]),
+ pkg!(("b", "0.0.0") => [dep("bad"),]),
+ pkg!("A" => [dep_req("a", "= 0.0.0"),dep_req_kind("b", "*", DepKind::Normal, true)]),
+ ];
+
+ let reg = registry(input);
+ assert!(resolve_and_validated(vec![dep("A")], &reg, None).is_err());
+}
+
+#[test]
+fn public_sat_unused_makes_things_pub() {
+ let input = vec![
+ pkg!(("a", "0.0.1")),
+ pkg!(("a", "0.0.0")),
+ pkg!(("b", "8.0.1") => [dep_req_kind("a", "= 0.0.1", DepKind::Normal, true),]),
+ pkg!(("b", "8.0.0") => [dep_req("a", "= 0.0.1"),]),
+ pkg!("c" => [dep_req("b", "= 8.0.0"),dep_req("a", "= 0.0.0"),]),
+ ];
+ let reg = registry(input);
+
+ resolve_and_validated(vec![dep("c")], &reg, None).unwrap();
+}
+
+#[test]
+fn public_sat_unused_makes_things_pub_2() {
+ let input = vec![
+ pkg!(("c", "0.0.2")),
+ pkg!(("c", "0.0.1")),
+ pkg!(("a-sys", "0.0.2")),
+ pkg!(("a-sys", "0.0.1") => [dep_req_kind("c", "= 0.0.1", DepKind::Normal, true),]),
+ pkg!("P" => [dep_req_kind("a-sys", "*", DepKind::Normal, true),dep_req("c", "= 0.0.1"),]),
+ pkg!("A" => [dep("P"),dep_req("c", "= 0.0.2"),]),
+ ];
+ let reg = registry(input);
+
+ resolve_and_validated(vec![dep("A")], &reg, None).unwrap();
+}
+
+#[test]
+#[should_panic(expected = "assertion failed: !name.is_empty()")]
+fn test_dependency_with_empty_name() {
+ // Bug 5229, dependency-names must not be empty
+ "".to_dep();
+}
+
+#[test]
+fn test_resolving_empty_dependency_list() {
+ let res = resolve(Vec::new(), &registry(vec![])).unwrap();
+
+ assert_eq!(res, names(&["root"]));
+}
+
+#[test]
+fn test_resolving_only_package() {
+ let reg = registry(vec![pkg!("foo")]);
+ let res = resolve(vec![dep("foo")], &reg).unwrap();
+ assert_same(&res, &names(&["root", "foo"]));
+}
+
+#[test]
+fn test_resolving_one_dep() {
+ let reg = registry(vec![pkg!("foo"), pkg!("bar")]);
+ let res = resolve(vec![dep("foo")], &reg).unwrap();
+ assert_same(&res, &names(&["root", "foo"]));
+}
+
+#[test]
+fn test_resolving_multiple_deps() {
+ let reg = registry(vec![pkg!("foo"), pkg!("bar"), pkg!("baz")]);
+ let res = resolve(vec![dep("foo"), dep("baz")], &reg).unwrap();
+ assert_same(&res, &names(&["root", "foo", "baz"]));
+}
+
+#[test]
+fn test_resolving_transitive_deps() {
+ let reg = registry(vec![pkg!("foo"), pkg!("bar" => ["foo"])]);
+ let res = resolve(vec![dep("bar")], &reg).unwrap();
+
+ assert_same(&res, &names(&["root", "foo", "bar"]));
+}
+
+#[test]
+fn test_resolving_common_transitive_deps() {
+ let reg = registry(vec![pkg!("foo" => ["bar"]), pkg!("bar")]);
+ let res = resolve(vec![dep("foo"), dep("bar")], &reg).unwrap();
+
+ assert_same(&res, &names(&["root", "foo", "bar"]));
+}
+
+#[test]
+fn test_resolving_with_same_name() {
+ let list = vec![
+ pkg_loc("foo", "https://first.example.com"),
+ pkg_loc("bar", "https://second.example.com"),
+ ];
+
+ let reg = registry(list);
+ let res = resolve(
+ vec![
+ dep_loc("foo", "https://first.example.com"),
+ dep_loc("bar", "https://second.example.com"),
+ ],
+ &reg,
+ )
+ .unwrap();
+
+ let mut names = loc_names(&[
+ ("foo", "https://first.example.com"),
+ ("bar", "https://second.example.com"),
+ ]);
+
+ names.push(pkg_id("root"));
+ assert_same(&res, &names);
+}
+
+#[test]
+fn test_resolving_with_dev_deps() {
+ let reg = registry(vec![
+ pkg!("foo" => ["bar", dep_kind("baz", DepKind::Development)]),
+ pkg!("baz" => ["bat", dep_kind("bam", DepKind::Development)]),
+ pkg!("bar"),
+ pkg!("bat"),
+ ]);
+
+ let res = resolve(
+ vec![dep("foo"), dep_kind("baz", DepKind::Development)],
+ &reg,
+ )
+ .unwrap();
+
+ assert_same(&res, &names(&["root", "foo", "bar", "baz", "bat"]));
+}
+
+#[test]
+fn resolving_with_many_versions() {
+ let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]);
+
+ let res = resolve(vec![dep("foo")], &reg).unwrap();
+
+ assert_same(&res, &names(&[("root", "1.0.0"), ("foo", "1.0.2")]));
+}
+
+#[test]
+fn resolving_with_specific_version() {
+ let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]);
+
+ let res = resolve(vec![dep_req("foo", "=1.0.1")], &reg).unwrap();
+
+ assert_same(&res, &names(&[("root", "1.0.0"), ("foo", "1.0.1")]));
+}
+
+#[test]
+fn test_resolving_maximum_version_with_transitive_deps() {
+ let reg = registry(vec![
+ pkg!(("util", "1.2.2")),
+ pkg!(("util", "1.0.0")),
+ pkg!(("util", "1.1.1")),
+ pkg!("foo" => [dep_req("util", "1.0.0")]),
+ pkg!("bar" => [dep_req("util", ">=1.0.1")]),
+ ]);
+
+ let res = resolve(vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("util", "1.2.2"),
+ ]),
+ );
+ assert!(!res.contains(&("util", "1.0.1").to_pkgid()));
+ assert!(!res.contains(&("util", "1.1.1").to_pkgid()));
+}
+
+#[test]
+fn test_resolving_minimum_version_with_transitive_deps() {
+ let reg = registry(vec![
+ pkg!(("util", "1.2.2")),
+ pkg!(("util", "1.0.0")),
+ pkg!(("util", "1.1.1")),
+ pkg!("foo" => [dep_req("util", "1.0.0")]),
+ pkg!("bar" => [dep_req("util", ">=1.0.1")]),
+ ]);
+
+ let mut config = Config::default().unwrap();
+ // -Z minimal-versions
+ // When the minimal-versions config option is specified then the lowest
+ // possible version of a package should be selected. "util 1.0.0" can't be
+ // selected because of the requirements of "bar", so the minimum version
+ // must be 1.1.1.
+ config.nightly_features_allowed = true;
+ config
+ .configure(
+ 1,
+ false,
+ None,
+ false,
+ false,
+ false,
+ &None,
+ &["minimal-versions".to_string()],
+ &[],
+ )
+ .unwrap();
+
+ let res = resolve_with_config(
+ vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")],
+ &reg,
+ &config,
+ )
+ .unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("util", "1.1.1"),
+ ]),
+ );
+ assert!(!res.contains(&("util", "1.2.2").to_pkgid()));
+ assert!(!res.contains(&("util", "1.0.0").to_pkgid()));
+}
+
+#[test]
+fn resolving_incompat_versions() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.1")),
+ pkg!(("foo", "1.0.2")),
+ pkg!("bar" => [dep_req("foo", "=1.0.2")]),
+ ]);
+
+ assert!(resolve(vec![dep_req("foo", "=1.0.1"), dep("bar")], &reg).is_err());
+}
+
+#[test]
+fn resolving_wrong_case_from_registry() {
+ // In the future we may #5678 allow this to happen.
+ // For back compatibility reasons, we probably won't.
+    // But we may want to future-proof ourselves by understanding it.
+ // This test documents the current behavior.
+ let reg = registry(vec![pkg!(("foo", "1.0.0")), pkg!("bar" => ["Foo"])]);
+
+ assert!(resolve(vec![dep("bar")], &reg).is_err());
+}
+
+#[test]
+fn resolving_mis_hyphenated_from_registry() {
+ // In the future we may #2775 allow this to happen.
+ // For back compatibility reasons, we probably won't.
+    // But we may want to future-proof ourselves by understanding it.
+ // This test documents the current behavior.
+ let reg = registry(vec![pkg!(("fo-o", "1.0.0")), pkg!("bar" => ["fo_o"])]);
+
+ assert!(resolve(vec![dep("bar")], &reg).is_err());
+}
+
+#[test]
+fn resolving_backtrack() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.2") => [dep("bar")]),
+ pkg!(("foo", "1.0.1") => [dep("baz")]),
+ pkg!("bar" => [dep_req("foo", "=2.0.2")]),
+ pkg!("baz"),
+ ]);
+
+ let res = resolve(vec![dep_req("foo", "^1")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[("root", "1.0.0"), ("foo", "1.0.1"), ("baz", "1.0.0")]),
+ );
+}
+
+#[test]
+fn resolving_backtrack_features() {
+ // test for cargo/issues/4347
+ let mut bad = dep("bar");
+ bad.set_features(vec!["bad"]);
+
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.2") => [bad]),
+ pkg!(("foo", "1.0.1") => [dep("bar")]),
+ pkg!("bar"),
+ ]);
+
+ let res = resolve(vec![dep_req("foo", "^1")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[("root", "1.0.0"), ("foo", "1.0.1"), ("bar", "1.0.0")]),
+ );
+}
+
+#[test]
+fn resolving_allows_multiple_compatible_versions() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.0")),
+ pkg!(("foo", "2.0.0")),
+ pkg!(("foo", "0.1.0")),
+ pkg!(("foo", "0.2.0")),
+ pkg!("bar" => ["d1", "d2", "d3", "d4"]),
+ pkg!("d1" => [dep_req("foo", "1")]),
+ pkg!("d2" => [dep_req("foo", "2")]),
+ pkg!("d3" => [dep_req("foo", "0.1")]),
+ pkg!("d4" => [dep_req("foo", "0.2")]),
+ ]);
+
+ let res = resolve(vec![dep("bar")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("foo", "2.0.0"),
+ ("foo", "0.1.0"),
+ ("foo", "0.2.0"),
+ ("d1", "1.0.0"),
+ ("d2", "1.0.0"),
+ ("d3", "1.0.0"),
+ ("d4", "1.0.0"),
+ ("bar", "1.0.0"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_deep_backtracking() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
+ pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
+ dep_req("other", "1")]),
+ pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
+ pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
+ pkg!(("baz", "1.0.1")),
+ pkg!(("dep_req", "1.0.0")),
+ pkg!(("dep_req", "2.0.0")),
+ ]);
+
+ let res = resolve(vec![dep_req("foo", "1")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "2.0.0"),
+ ("baz", "1.0.1"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_sys_crates() {
+ // This is based on issues/4902
+    // With `l`, a normal library, we get 2 copies so everyone gets the newest compatible.
+    // But with `l-sys`, a library with a links attribute, we make sure there is only one.
+ let reg = registry(vec![
+ pkg!(("l-sys", "0.9.1")),
+ pkg!(("l-sys", "0.10.0")),
+ pkg!(("l", "0.9.1")),
+ pkg!(("l", "0.10.0")),
+ pkg!(("d", "1.0.0") => [dep_req("l-sys", ">=0.8.0, <=0.10.0"), dep_req("l", ">=0.8.0, <=0.10.0")]),
+ pkg!(("r", "1.0.0") => [dep_req("l-sys", "0.9"), dep_req("l", "0.9")]),
+ ]);
+
+ let res = resolve(vec![dep_req("d", "1"), dep_req("r", "1")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("d", "1.0.0"),
+ ("r", "1.0.0"),
+ ("l-sys", "0.9.1"),
+ ("l", "0.9.1"),
+ ("l", "0.10.0"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_constrained_sibling_backtrack_parent() {
+ // There is no point in considering all of the backtrack_trap{1,2}
+ // candidates since they can't change the result of failing to
+ // resolve 'constrained'. Cargo should (ideally) skip past them and resume
+ // resolution once the activation of the parent, 'bar', is rolled back.
+ // Note that the traps are slightly more constrained to make sure they
+ // get picked first.
+ let mut reglist = vec![
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "1.0"),
+ dep_req("constrained", "=1.0.0")]),
+ pkg!(("bar", "1.0.0") => [dep_req("backtrack_trap1", "1.0.2"),
+ dep_req("backtrack_trap2", "1.0.2"),
+ dep_req("constrained", "1.0.0")]),
+ pkg!(("constrained", "1.0.0")),
+ pkg!(("backtrack_trap1", "1.0.0")),
+ pkg!(("backtrack_trap2", "1.0.0")),
+ ];
+ // Bump this to make the test harder - it adds more versions of bar that will
+ // fail to resolve, and more versions of the traps to consider.
+ const NUM_BARS_AND_TRAPS: usize = 50; // minimum 2
+ for i in 1..NUM_BARS_AND_TRAPS {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(
+ pkg!(("bar", vsn.clone()) => [dep_req("backtrack_trap1", "1.0.2"),
+ dep_req("backtrack_trap2", "1.0.2"),
+ dep_req("constrained", "1.0.1")]),
+ );
+ reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
+ reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
+ reglist.push(pkg!(("constrained", vsn.clone())));
+ }
+ let reg = registry(reglist);
+
+ let res = resolve(vec![dep_req("foo", "1")], &reg).unwrap();
+
+ // Only `bar 1.0.0` can resolve: every newer `bar` requires `constrained 1.0.1`,
+ // which conflicts with foo's `constrained = "=1.0.0"`.
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("constrained", "1.0.0"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_many_equivalent_backtracking() {
+ let mut reglist = Vec::new();
+
+ const DEPTH: usize = 200;
+ const BRANCHING_FACTOR: usize = 100;
+
+ // Each level depends on the next but the last level does not exist.
+ // Without caching we need to test every path to the last level O(BRANCHING_FACTOR ^ DEPTH)
+ // and this test will time out. With caching we need to discover that none of these
+ // can be activated O(BRANCHING_FACTOR * DEPTH)
+ for l in 0..DEPTH {
+ let name = format!("level{}", l);
+ let next = format!("level{}", l + 1);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())]));
+ }
+ }
+
+ let reg = registry(reglist.clone());
+
+ let res = resolve(vec![dep("level0")], &reg);
+
+ assert!(res.is_err());
+
+ // It is easy to write code that quickly returns an error.
+ // Let's make sure we can find a good answer if it is there.
+ reglist.push(pkg!(("level0", "1.0.0")));
+
+ let reg = registry(reglist.clone());
+
+ let res = resolve(vec![dep("level0")], &reg).unwrap();
+
+ assert_contains(&res, &names(&[("root", "1.0.0"), ("level0", "1.0.0")]));
+
+ // Make sure we have not special-cased the no-candidates situation.
+ reglist.push(pkg!(("constrained", "1.1.0")));
+ reglist.push(pkg!(("constrained", "1.0.0")));
+ reglist.push(
+ pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep_req("constrained", "=1.0.0")]),
+ );
+
+ let reg = registry(reglist.clone());
+
+ let res = resolve(vec![dep("level0"), dep("constrained")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("level0", "1.0.0"),
+ ("constrained", "1.1.0"),
+ ]),
+ );
+
+ let reg = registry(reglist.clone());
+
+ let res = resolve(vec![dep_req("level0", "1.0.1"), dep("constrained")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ (format!("level{}", DEPTH).as_str(), "1.0.0"),
+ ("constrained", "1.0.0"),
+ ]),
+ );
+
+ let reg = registry(reglist);
+
+ let res = resolve(
+ vec![dep_req("level0", "1.0.1"), dep_req("constrained", "1.1.0")],
+ &reg,
+ );
+
+ assert!(res.is_err());
+}
+
+#[test]
+fn resolving_with_deep_traps() {
+ let mut reglist = Vec::new();
+
+ const DEPTH: usize = 200;
+ const BRANCHING_FACTOR: usize = 100;
+
+ // Each backtrack_trap depends on the next, and adds a backtrack frame.
+ // None of which is going to help with `bad`.
+ for l in 0..DEPTH {
+ let name = format!("backtrack_trap{}", l);
+ let next = format!("backtrack_trap{}", l + 1);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())]));
+ }
+ }
+ {
+ // The last level of traps has no further dependencies.
+ let name = format!("backtrack_trap{}", DEPTH);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str())));
+ }
+ }
+ {
+ // slightly less constrained to make sure `cloaking` gets picked last.
+ for i in 1..(BRANCHING_FACTOR + 10) {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("cloaking", vsn.as_str()) => [dep_req("bad", "1.0.1")]));
+ }
+ }
+
+ let reg = registry(reglist);
+
+ // `bad` never exists, so this must fail — in reasonable time, not by
+ // exhaustively exploring all the trap frames.
+ let res = resolve(vec![dep("backtrack_trap0"), dep("cloaking")], &reg);
+
+ assert!(res.is_err());
+}
+
+#[test]
+fn resolving_with_constrained_cousins_backtrack() {
+ let mut reglist = Vec::new();
+
+ const DEPTH: usize = 100;
+ const BRANCHING_FACTOR: usize = 50;
+
+ // Each backtrack_trap depends on the next.
+ // The last depends on a specific ver of constrained.
+ for l in 0..DEPTH {
+ let name = format!("backtrack_trap{}", l);
+ let next = format!("backtrack_trap{}", l + 1);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())]));
+ }
+ }
+ {
+ let name = format!("backtrack_trap{}", DEPTH);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(
+ pkg!((name.as_str(), vsn.as_str()) => [dep_req("constrained", ">=1.1.0, <=2.0.0")]),
+ );
+ }
+ }
+ {
+ // slightly less constrained to make sure `constrained` gets picked last.
+ for i in 0..(BRANCHING_FACTOR + 10) {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("constrained", vsn.as_str())));
+ }
+ reglist.push(pkg!(("constrained", "1.1.0")));
+ reglist.push(pkg!(("constrained", "2.0.0")));
+ reglist.push(pkg!(("constrained", "2.0.1")));
+ }
+ reglist.push(pkg!(("cloaking", "1.0.0") => [dep_req("constrained", "~1.0.0")]));
+
+ let reg = registry(reglist.clone());
+
+ // `backtrack_trap0 = "*"` is a lot of ways of saying `constrained = ">=1.1.0, <=2.0.0"`
+ // but `constrained = "2.0.1"` is already picked.
+ // Only then to try and solve `constrained = "~1.0.0"` which is incompatible.
+ let res = resolve(
+ vec![
+ dep("backtrack_trap0"),
+ dep_req("constrained", "2.0.1"),
+ dep("cloaking"),
+ ],
+ &reg,
+ );
+
+ assert!(res.is_err());
+
+ // Each level depends on the next but the last depends on incompatible deps.
+ // Let's make sure that we can cache that a dep has incompatible deps.
+ for l in 0..DEPTH {
+ let name = format!("level{}", l);
+ let next = format!("level{}", l + 1);
+ for i in 1..BRANCHING_FACTOR {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())]));
+ }
+ }
+ reglist.push(
+ pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep("backtrack_trap0"),
+ dep("cloaking")
+ ]),
+ );
+
+ let reg = registry(reglist);
+
+ let res = resolve(vec![dep("level0"), dep_req("constrained", "2.0.1")], &reg);
+
+ assert!(res.is_err());
+
+ // With `constrained = "2.0.0"` both the trap chain and `cloaking` can be satisfied.
+ let res = resolve(vec![dep("level0"), dep_req("constrained", "2.0.0")], &reg).unwrap();
+
+ assert_contains(
+ &res,
+ &names(&[("constrained", "2.0.0"), ("cloaking", "1.0.0")]),
+ );
+}
+
+#[test]
+fn resolving_with_constrained_sibling_backtrack_activation() {
+ // It makes sense to resolve most-constrained deps first, but
+ // with that logic the backtrack traps here come between the two
+ // attempted resolutions of 'constrained'. When backtracking,
+ // cargo should skip past them and resume resolution once the
+ // number of activations for 'constrained' changes.
+ let mut reglist = vec![
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "=1.0.0"),
+ dep_req("backtrack_trap1", "1.0"),
+ dep_req("backtrack_trap2", "1.0"),
+ dep_req("constrained", "<=1.0.60")]),
+ pkg!(("bar", "1.0.0") => [dep_req("constrained", ">=1.0.60")]),
+ ];
+ // Bump these to make the test harder, but you'll also need to
+ // change the version constraints on `constrained` above. To correctly
+ // exercise Cargo, the relationship between the values is:
+ // NUM_CONSTRAINED - vsn < NUM_TRAPS < vsn
+ // to make sure the traps are resolved between `constrained`.
+ const NUM_TRAPS: usize = 45; // min 1
+ const NUM_CONSTRAINED: usize = 100; // min 1
+ for i in 0..NUM_TRAPS {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
+ reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
+ }
+ for i in 0..NUM_CONSTRAINED {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("constrained", vsn.clone())));
+ }
+ let reg = registry(reglist);
+
+ let res = resolve(vec![dep_req("foo", "1")], &reg).unwrap();
+
+ // `constrained` must land on exactly 1.0.60: foo requires `<=1.0.60`
+ // while bar requires `>=1.0.60`.
+ assert_contains(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("constrained", "1.0.60"),
+ ]),
+ );
+}
+
+#[test]
+fn resolving_with_public_constrained_sibling() {
+ // It makes sense to resolve most-constrained deps first, but
+ // with that logic the backtrack traps here come between the two
+ // attempted resolutions of 'constrained'. When backtracking,
+ // cargo should skip past them and resume resolution once the
+ // number of activations for 'constrained' changes.
+ // NOTE(review): same setup as the sibling test above, except `bar`'s
+ // dependency on `constrained` is built with `dep_req_kind(..., true)` —
+ // presumably marking it public; confirm against the helper's signature.
+ let mut reglist = vec![
+ pkg!(("foo", "1.0.0") => [dep_req("bar", "=1.0.0"),
+ dep_req("backtrack_trap1", "1.0"),
+ dep_req("backtrack_trap2", "1.0"),
+ dep_req("constrained", "<=60")]),
+ pkg!(("bar", "1.0.0") => [dep_req_kind("constrained", ">=60", DepKind::Normal, true)]),
+ ];
+ // Bump these to make the test harder, but you'll also need to
+ // change the version constraints on `constrained` above. To correctly
+ // exercise Cargo, the relationship between the values is:
+ // NUM_CONSTRAINED - vsn < NUM_TRAPS < vsn
+ // to make sure the traps are resolved between `constrained`.
+ const NUM_TRAPS: usize = 45; // min 1
+ const NUM_CONSTRAINED: usize = 100; // min 1
+ for i in 0..NUM_TRAPS {
+ let vsn = format!("1.0.{}", i);
+ reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
+ reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
+ }
+ for i in 0..NUM_CONSTRAINED {
+ let vsn = format!("{}.0.0", i);
+ reglist.push(pkg!(("constrained", vsn.clone())));
+ }
+ let reg = registry(reglist);
+
+ // Result is intentionally ignored; the validation inside the helper is the test.
+ let _ = resolve_and_validated(vec![dep_req("foo", "1")], &reg, None);
+}
+
+#[test]
+fn resolving_with_constrained_sibling_transitive_dep_effects() {
+ // When backtracking due to a failed dependency, if Cargo is
+ // trying to be clever and skip irrelevant dependencies, care must
+ // be taken to not miss the transitive effects of alternatives. E.g.
+ // in the right-to-left resolution of the graph below, B may
+ // affect whether D is successfully resolved.
+ //
+ // A
+ // / | \
+ // B C D
+ // | |
+ // C D
+ let reg = registry(vec![
+ pkg!(("A", "1.0.0") => [dep_req("B", "1.0"),
+ dep_req("C", "1.0"),
+ dep_req("D", "1.0.100")]),
+ pkg!(("B", "1.0.0") => [dep_req("C", ">=1.0.0")]),
+ pkg!(("B", "1.0.1") => [dep_req("C", ">=1.0.1")]),
+ pkg!(("C", "1.0.0") => [dep_req("D", "1.0.0")]),
+ pkg!(("C", "1.0.1") => [dep_req("D", ">=1.0.1,<1.0.100")]),
+ pkg!(("C", "1.0.2") => [dep_req("D", ">=1.0.2,<1.0.100")]),
+ pkg!(("D", "1.0.0")),
+ pkg!(("D", "1.0.1")),
+ pkg!(("D", "1.0.2")),
+ pkg!(("D", "1.0.100")),
+ pkg!(("D", "1.0.101")),
+ pkg!(("D", "1.0.102")),
+ pkg!(("D", "1.0.103")),
+ pkg!(("D", "1.0.104")),
+ pkg!(("D", "1.0.105")),
+ ]);
+
+ let res = resolve(vec![dep_req("A", "1")], &reg).unwrap();
+
+ // B 1.0.1 would force C >= 1.0.1, whose `D < 1.0.100` conflicts with A's
+ // `D 1.0.100` requirement — so B 1.0.0 and C 1.0.0 must be chosen.
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("A", "1.0.0"),
+ ("B", "1.0.0"),
+ ("C", "1.0.0"),
+ ("D", "1.0.105"),
+ ]),
+ );
+}
+
+#[test]
+fn incomplete_information_skipping() {
+ // When backtracking due to a failed dependency, if Cargo is
+ // trying to be clever and skip irrelevant dependencies, care must
+ // be taken to not miss the transitive effects of alternatives.
+ // Fuzzing discovered that for some reason cargo was skipping based
+ // on incomplete information in the following case:
+ // minimized bug found in:
+ // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+ let input = vec![
+ pkg!(("a", "1.0.0")),
+ pkg!(("a", "1.1.0")),
+ pkg!("b" => [dep("a")]),
+ pkg!(("c", "1.0.0")),
+ pkg!(("c", "1.1.0")),
+ pkg!("d" => [dep_req("c", "=1.0")]),
+ pkg!(("e", "1.0.0")),
+ pkg!(("e", "1.1.0") => [dep_req("c", "1.1")]),
+ pkg!("to_yank"),
+ pkg!(("f", "1.0.0") => [
+ dep("to_yank"),
+ dep("d"),
+ ]),
+ pkg!(("f", "1.1.0") => [dep("d")]),
+ pkg!("g" => [
+ dep("b"),
+ dep("e"),
+ dep("f"),
+ ]),
+ ];
+ let reg = registry(input.clone());
+
+ let res = resolve(vec![dep("g")], &reg).unwrap();
+ let package_to_yank = "to_yank".to_pkgid();
+ // this package is not used in the resolution.
+ assert!(!res.contains(&package_to_yank));
+ // so when we yank it (i.e. remove it from the registry entirely)
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| package_to_yank != x.package_id())
+ .collect(),
+ );
+ // sanity check: exactly one package was removed
+ assert_eq!(input.len(), new_reg.len() + 1);
+ // it should still build
+ assert!(resolve(vec![dep("g")], &new_reg).is_ok());
+}
+
+#[test]
+fn incomplete_information_skipping_2() {
+ // When backtracking due to a failed dependency, if Cargo is
+ // trying to be clever and skip irrelevant dependencies, care must
+ // be taken to not miss the transitive effects of alternatives.
+ // Fuzzing discovered that for some reason cargo was skipping based
+ // on incomplete information in the following case:
+ // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+ let input = vec![
+ pkg!(("b", "3.8.10")),
+ pkg!(("b", "8.7.4")),
+ pkg!(("b", "9.4.6")),
+ pkg!(("c", "1.8.8")),
+ pkg!(("c", "10.2.5")),
+ pkg!(("d", "4.1.2") => [
+ dep_req("bad", "=6.10.9"),
+ ]),
+ pkg!(("d", "5.5.6")),
+ pkg!(("d", "5.6.10")),
+ pkg!(("to_yank", "8.0.1")),
+ pkg!(("to_yank", "8.8.1")),
+ pkg!(("e", "4.7.8") => [
+ dep_req("d", ">=5.5.6, <=5.6.10"),
+ dep_req("to_yank", "=8.0.1"),
+ ]),
+ pkg!(("e", "7.4.9") => [
+ dep_req("bad", "=4.7.5"),
+ ]),
+ pkg!("f" => [
+ dep_req("d", ">=4.1.2, <=5.5.6"),
+ ]),
+ pkg!("g" => [
+ dep("bad"),
+ ]),
+ pkg!(("h", "3.8.3") => [
+ dep("g"),
+ ]),
+ pkg!(("h", "6.8.3") => [
+ dep("f"),
+ ]),
+ pkg!(("h", "8.1.9") => [
+ dep_req("to_yank", "=8.8.1"),
+ ]),
+ pkg!("i" => [
+ dep("b"),
+ dep("c"),
+ dep("e"),
+ dep("h"),
+ ]),
+ ];
+ let reg = registry(input.clone());
+
+ let res = resolve(vec![dep("i")], &reg).unwrap();
+ let package_to_yank = ("to_yank", "8.8.1").to_pkgid();
+ // this package is not used in the resolution.
+ assert!(!res.contains(&package_to_yank));
+ // so when we yank it (i.e. remove it from the registry entirely)
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| package_to_yank != x.package_id())
+ .collect(),
+ );
+ // sanity check: exactly one package was removed
+ assert_eq!(input.len(), new_reg.len() + 1);
+ // it should still build
+ assert!(resolve(vec![dep("i")], &new_reg).is_ok());
+}
+
+#[test]
+fn incomplete_information_skipping_3() {
+ // When backtracking due to a failed dependency, if Cargo is
+ // trying to be clever and skip irrelevant dependencies, care must
+ // be taken to not miss the transitive effects of alternatives.
+ // Fuzzing discovered that for some reason cargo was skipping based
+ // on incomplete information in the following case:
+ // minimized bug found in:
+ // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+ let input = vec![
+ pkg! {("to_yank", "3.0.3")},
+ pkg! {("to_yank", "3.3.0")},
+ pkg! {("to_yank", "3.3.1")},
+ pkg! {("a", "3.3.0") => [
+ dep_req("to_yank", "=3.0.3"),
+ ] },
+ pkg! {("a", "3.3.2") => [
+ dep_req("to_yank", "<=3.3.0"),
+ ] },
+ pkg! {("b", "0.1.3") => [
+ dep_req("a", "=3.3.0"),
+ ] },
+ pkg! {("b", "2.0.2") => [
+ dep_req("to_yank", "3.3.0"),
+ dep("a"),
+ ] },
+ pkg! {("b", "2.3.3") => [
+ dep_req("to_yank", "3.3.0"),
+ dep_req("a", "=3.3.0"),
+ ] },
+ ];
+ let reg = registry(input.clone());
+
+ let res = resolve(vec![dep("b")], &reg).unwrap();
+ let package_to_yank = ("to_yank", "3.0.3").to_pkgid();
+ // this package is not used in the resolution.
+ assert!(!res.contains(&package_to_yank));
+ // so when we yank it (i.e. remove it from the registry entirely)
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| package_to_yank != x.package_id())
+ .collect(),
+ );
+ // sanity check: exactly one package was removed
+ assert_eq!(input.len(), new_reg.len() + 1);
+ // it should still build
+ assert!(resolve(vec![dep("b")], &new_reg).is_ok());
+}
+
+#[test]
+fn resolving_but_no_exists() {
+ // Requesting a package from an empty registry must fail with a clear,
+ // user-facing error naming the package, the source, and the requirer.
+ let reg = registry(vec![]);
+
+ let res = resolve(vec![dep_req("foo", "1")], &reg);
+ assert!(res.is_err());
+
+ assert_eq!(
+ res.err().unwrap().to_string(),
+ "no matching package named `foo` found\n\
+ location searched: registry `https://example.com/`\n\
+ required by package `root v1.0.0 (registry `https://example.com/`)`\
+ "
+ );
+}
+
+#[test]
+fn resolving_cycle() {
+ // A package that depends on itself. The result is deliberately ignored;
+ // the test only checks that resolution terminates without panicking.
+ let reg = registry(vec![pkg!("foo" => ["foo"])]);
+
+ let _ = resolve(vec![dep_req("foo", "1")], &reg);
+}
+
+#[test]
+fn hard_equality() {
+ // The root pins `foo = "=1.0.0"` while `bar` asks for `foo = "1.0.0"`
+ // (caret-style); both must unify on 1.0.0 even though 1.0.1 is newer.
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.1")),
+ pkg!(("foo", "1.0.0")),
+ pkg!(("bar", "1.0.0") => [dep_req("foo", "1.0.0")]),
+ ]);
+
+ let res = resolve(vec![dep_req("bar", "1"), dep_req("foo", "=1.0.0")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[("root", "1.0.0"), ("foo", "1.0.0"), ("bar", "1.0.0")]),
+ );
+}
+
+#[test]
+fn large_conflict_cache() {
+ // Stress test for the resolver's conflict cache: the result is ignored;
+ // the point is that resolution with many conflicts completes.
+ let mut input = vec![
+ pkg!(("last", "0.0.0") => [dep("bad")]), // just to make sure last is less constrained
+ ];
+ let mut root_deps = vec![dep("last")];
+ const NUM_VERSIONS: u8 = 20;
+ for name in 0..=NUM_VERSIONS {
+ // a large number of conflicts can easily be generated by a sys crate.
+ let sys_name = format!("{}-sys", (b'a' + name) as char);
+ let in_len = input.len();
+ input.push(pkg!(("last", format!("{}.0.0", in_len)) => [dep_req(&sys_name, "=0.0.0")]));
+ root_deps.push(dep_req(&sys_name, ">= 0.0.1"));
+
+ // a large number of conflicts can also easily be generated by a major release version.
+ let plane_name = format!("{}", (b'a' + name) as char);
+ let in_len = input.len();
+ input.push(pkg!(("last", format!("{}.0.0", in_len)) => [dep_req(&plane_name, "=1.0.0")]));
+ root_deps.push(dep_req(&plane_name, ">= 1.0.1"));
+
+ for i in 0..=NUM_VERSIONS {
+ input.push(pkg!((&sys_name, format!("{}.0.0", i))));
+ input.push(pkg!((&plane_name, format!("1.0.{}", i))));
+ }
+ }
+ let reg = registry(input);
+ let _ = resolve(root_deps, &reg);
+}
+
+#[test]
+fn off_by_one_bug() {
+ // NOTE(review): presumably a minimized regression case (see test name);
+ // only runs resolve_and_validated and relies on its internal checks.
+ let input = vec![
+ pkg!(("A-sys", "0.0.1")),
+ pkg!(("A-sys", "0.0.4")),
+ pkg!(("A-sys", "0.0.6")),
+ pkg!(("A-sys", "0.0.7")),
+ pkg!(("NA", "0.0.0") => [dep_req("A-sys", "<= 0.0.5"),]),
+ pkg!(("NA", "0.0.1") => [dep_req("A-sys", ">= 0.0.6, <= 0.0.8"),]),
+ pkg!(("a", "0.0.1")),
+ pkg!(("a", "0.0.2")),
+ pkg!(("aa", "0.0.0") => [dep_req("A-sys", ">= 0.0.4, <= 0.0.6"),dep_req("NA", "<= 0.0.0"),]),
+ pkg!(("f", "0.0.3") => [dep("NA"),dep_req("a", "<= 0.0.2"),dep("aa"),]),
+ ];
+
+ let reg = registry(input);
+ let _ = resolve_and_validated(vec![dep("f")], &reg, None);
+}
+
+#[test]
+fn conflict_store_bug() {
+ // NOTE(review): presumably a minimized regression case for the conflict
+ // store (see test name); only runs resolve_and_validated and relies on
+ // its internal checks.
+ let input = vec![
+ pkg!(("A", "0.0.3")),
+ pkg!(("A", "0.0.5")),
+ pkg!(("A", "0.0.9") => [dep("bad"),]),
+ pkg!(("A", "0.0.10") => [dep("bad"),]),
+ pkg!(("L-sys", "0.0.1") => [dep("bad"),]),
+ pkg!(("L-sys", "0.0.5")),
+ pkg!(("R", "0.0.4") => [
+ dep_req("L-sys", "= 0.0.5"),
+ ]),
+ pkg!(("R", "0.0.6")),
+ pkg!(("a-sys", "0.0.5")),
+ pkg!(("a-sys", "0.0.11")),
+ pkg!(("c", "0.0.12") => [
+ dep_req("R", ">= 0.0.3, <= 0.0.4"),
+ ]),
+ pkg!(("c", "0.0.13") => [
+ dep_req("a-sys", ">= 0.0.8, <= 0.0.11"),
+ ]),
+ pkg!(("c0", "0.0.6") => [
+ dep_req("L-sys", "<= 0.0.2"),
+ ]),
+ pkg!(("c0", "0.0.10") => [
+ dep_req("A", ">= 0.0.9, <= 0.0.10"),
+ dep_req("a-sys", "= 0.0.5"),
+ ]),
+ pkg!("j" => [
+ dep_req("A", ">= 0.0.3, <= 0.0.5"),
+ dep_req("R", ">=0.0.4, <= 0.0.6"),
+ dep_req("c", ">= 0.0.9"),
+ dep_req("c0", ">= 0.0.6"),
+ ]),
+ ];
+
+ let reg = registry(input);
+ let _ = resolve_and_validated(vec![dep("j")], &reg, None);
+}
+
+#[test]
+// (The "then" in this name is a typo for "than", but renaming a test would
+// change the externally visible test identifier, so it is kept as-is.)
+fn conflict_store_more_then_one_match() {
+ // NOTE(review): presumably a minimized regression case where multiple
+ // conflict-store entries match; relies on resolve_and_validated's checks.
+ let input = vec![
+ pkg!(("A", "0.0.0")),
+ pkg!(("A", "0.0.1")),
+ pkg!(("A-sys", "0.0.0")),
+ pkg!(("A-sys", "0.0.1")),
+ pkg!(("A-sys", "0.0.2")),
+ pkg!(("A-sys", "0.0.3")),
+ pkg!(("A-sys", "0.0.12")),
+ pkg!(("A-sys", "0.0.16")),
+ pkg!(("B-sys", "0.0.0")),
+ pkg!(("B-sys", "0.0.1")),
+ pkg!(("B-sys", "0.0.2") => [dep_req("A-sys", "= 0.0.12"),]),
+ pkg!(("BA-sys", "0.0.0") => [dep_req("A-sys","= 0.0.16"),]),
+ pkg!(("BA-sys", "0.0.1") => [dep("bad"),]),
+ pkg!(("BA-sys", "0.0.2") => [dep("bad"),]),
+ pkg!("nA" => [
+ dep("A"),
+ dep_req("A-sys", "<= 0.0.3"),
+ dep("B-sys"),
+ dep("BA-sys"),
+ ]),
+ ];
+ let reg = registry(input);
+ let _ = resolve_and_validated(vec![dep("nA")], &reg, None);
+}
+
+#[test]
+fn bad_lockfile_from_8249() {
+ // Regression test for rust-lang/cargo#8249 (see test name).
+ let input = vec![
+ pkg!(("a-sys", "0.2.0")),
+ pkg!(("a-sys", "0.1.0")),
+ pkg!(("b", "0.1.0") => [
+ dep_req("a-sys", "0.1"), // should be optional: true, but not needed for now
+ ]),
+ pkg!(("c", "1.0.0") => [
+ dep_req("b", "=0.1.0"),
+ ]),
+ pkg!("foo" => [
+ dep_req("a-sys", "=0.2.0"),
+ {
+ // `b` is depended on with the `a-sys` feature enabled.
+ let mut b = dep_req("b", "=0.1.0");
+ b.set_features(vec!["a-sys"]);
+ b
+ },
+ dep_req("c", "=1.0.0"),
+ ]),
+ ];
+ let reg = registry(input);
+ let _ = resolve_and_validated(vec![dep("foo")], &reg, None);
+}
+
+#[test]
+fn cyclic_good_error_message() {
+ // A -> C -> A is a cycle; the error message must spell out the full
+ // chain of dependencies forming it, not just say "cycle detected".
+ let input = vec![
+ pkg!(("A", "0.0.0") => [dep("C")]),
+ pkg!(("B", "0.0.0") => [dep("C")]),
+ pkg!(("C", "0.0.0") => [dep("A")]),
+ ];
+ let reg = registry(input);
+ let error = resolve(vec![dep("A"), dep("B")], &reg).unwrap_err();
+ println!("{}", error);
+ assert_eq!("\
+cyclic package dependency: package `A v0.0.0 (registry `https://example.com/`)` depends on itself. Cycle:
+package `A v0.0.0 (registry `https://example.com/`)`
+ ... which satisfies dependency `A = \"*\"` of package `C v0.0.0 (registry `https://example.com/`)`
+ ... which satisfies dependency `C = \"*\"` of package `A v0.0.0 (registry `https://example.com/`)`\
+", error.to_string());
+}